Add fine tuning api #283

Merged: 11 commits, Feb 26, 2025
104 changes: 103 additions & 1 deletion core/src/main/scala/sttp/openai/OpenAI.scala
@@ -24,6 +24,8 @@ import sttp.openai.requests.completions.chat.ChatRequestResponseData.ChatRespons
import sttp.openai.requests.embeddings.EmbeddingsRequestBody.EmbeddingsBody
import sttp.openai.requests.embeddings.EmbeddingsResponseBody.EmbeddingResponse
import sttp.openai.requests.files.FilesResponseData._
import sttp.openai.requests.finetuning
import sttp.openai.requests.finetuning._
import sttp.openai.requests.images.ImageResponseData.ImageResponse
import sttp.openai.requests.images.creation.ImageCreationRequestBody.ImageCreationBody
import sttp.openai.requests.images.edit.ImageEditsConfig
@@ -82,7 +84,7 @@ class OpenAI(authToken: String, baseUri: Uri = OpenAIUris.OpenAIBaseUri) {
* @param completionBody
* Create completion request body.
* @deprecated
* This is marked as Legacy in OpenAI API and might be removed in the future. Please use createChatCompletion instead.
* This is marked as Legacy in OpenAI API and might be removed in the future. Please use [[createChatCompletion]] instead.
*/
def createCompletion(completionBody: CompletionsBody): Request[Either[OpenAIException, CompletionsResponse]] =
openAIAuthRequest
@@ -541,6 +543,100 @@
}
.response(asJson_parseErrors[AudioResponse])

/** Creates a fine-tuning job which begins the process of creating a new model from a given dataset.
*
* Response includes details of the enqueued job including job status and the name of the fine-tuned models once complete.
*
* [[https://platform.openai.com/docs/api-reference/fine-tuning/create]]
*
* @param fineTuningRequestBody
* Request body that will be used to create a fine-tuning job.
*/
def createFineTuningJob(fineTuningRequestBody: FineTuningJobRequestBody): Request[Either[OpenAIException, FineTuningJobResponse]] =
openAIAuthRequest
.post(openAIUris.FineTuningJobs)
.body(fineTuningRequestBody)
.response(asJson_parseErrors[FineTuningJobResponse])
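
Usage sketch (reviewer illustration, not part of the diff): building the request and sending it with an sttp-client4 backend. The API key and training-file ID are hypothetical, and the backend shown is only one option.

import sttp.client4.DefaultSyncBackend
import sttp.openai.OpenAI
import sttp.openai.requests.finetuning.{FineTuningJobRequestBody, FineTuningModel}

val openAI  = new OpenAI("your-api-key")
val backend = DefaultSyncBackend()

// Returns Either[OpenAIException, FineTuningJobResponse]
val result = openAI
  .createFineTuningJob(
    FineTuningJobRequestBody(
      model = FineTuningModel.GPT4oMini20240718,
      trainingFile = "file-abc123" // hypothetical ID of an uploaded JSONL file
    )
  )
  .send(backend)
  .body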

/** List your organization's fine-tuning jobs.
*
* [[https://platform.openai.com/docs/api-reference/fine-tuning/list]]
*/
def listFineTuningJobs(
queryParameters: finetuning.QueryParameters = finetuning.QueryParameters.empty
): Request[Either[OpenAIException, ListFineTuningJobResponse]] = {
val uri = openAIUris.FineTuningJobs
.withParams(queryParameters.toMap)

openAIAuthRequest
.get(uri)
.response(asJson_parseErrors[ListFineTuningJobResponse])
}

/** Get status updates for a fine-tuning job.
*
* [[https://platform.openai.com/docs/api-reference/fine-tuning/list-events]]
*
* @param fineTuningJobId
* The ID of the fine-tuning job to get events for.
*/
def listFineTuningJobEvents(
fineTuningJobId: String,
queryParameters: finetuning.QueryParameters = finetuning.QueryParameters.empty
): Request[Either[OpenAIException, ListFineTuningJobEventResponse]] = {
val uri = openAIUris
.fineTuningJobEvents(fineTuningJobId)
.withParams(queryParameters.toMap)

openAIAuthRequest
.get(uri)
.response(asJson_parseErrors[ListFineTuningJobEventResponse])
}

/** List checkpoints for a fine-tuning job.
*
* [[https://platform.openai.com/docs/api-reference/fine-tuning/list-checkpoints]]
*
* @param fineTuningJobId
* The ID of the fine-tuning job to get checkpoints for.
*/
def listFineTuningJobCheckpoints(
fineTuningJobId: String,
queryParameters: finetuning.QueryParameters = finetuning.QueryParameters.empty
): Request[Either[OpenAIException, ListFineTuningJobCheckpointResponse]] = {
val uri = openAIUris
.fineTuningJobCheckpoints(fineTuningJobId)
.withParams(queryParameters.toMap)

openAIAuthRequest
.get(uri)
.response(asJson_parseErrors[ListFineTuningJobCheckpointResponse])
}

/** Get info about a fine-tuning job.
*
* [[https://platform.openai.com/docs/api-reference/fine-tuning/retrieve]]
*
* @param fineTuningJobId
* The ID of the fine-tuning job.
*/
def retrieveFineTuningJob(fineTuningJobId: String): Request[Either[OpenAIException, FineTuningJobResponse]] =
openAIAuthRequest
.get(openAIUris.fineTuningJob(fineTuningJobId))
.response(asJson_parseErrors[FineTuningJobResponse])

/** Immediately cancel a fine-tune job.
*
* [[https://platform.openai.com/docs/api-reference/fine-tuning/cancel]]
*
* @param fineTuningJobId
* The ID of the fine-tuning job to cancel.
*/
def cancelFineTuningJob(fineTuningJobId: String): Request[Either[OpenAIException, FineTuningJobResponse]] =
openAIAuthRequest
.post(openAIUris.cancelFineTuningJob(fineTuningJobId))
.response(asJson_parseErrors[FineTuningJobResponse])

/** Gets info about the fine-tune job.
*
* [[https://platform.openai.com/docs/api-reference/embeddings/create]]
@@ -1036,6 +1132,7 @@ private class OpenAIUris(val baseUri: Uri) {
val Files: Uri = uri"$baseUri/files"
val Models: Uri = uri"$baseUri/models"
val Moderations: Uri = uri"$baseUri/moderations"
val FineTuningJobs: Uri = uri"$baseUri/fine_tuning/jobs"
val Transcriptions: Uri = audioBase.addPath("transcriptions")
val Translations: Uri = audioBase.addPath("translations")
val VariationsImage: Uri = imageBase.addPath("variations")
@@ -1045,6 +1142,11 @@
val ThreadsRuns: Uri = uri"$baseUri/threads/runs"
val VectorStores: Uri = uri"$baseUri/vector_stores"

def fineTuningJob(fineTuningJobId: String): Uri = FineTuningJobs.addPath(fineTuningJobId)
def fineTuningJobEvents(fineTuningJobId: String): Uri = fineTuningJob(fineTuningJobId).addPath("events")
def fineTuningJobCheckpoints(fineTuningJobId: String): Uri = fineTuningJob(fineTuningJobId).addPath("checkpoints")
def cancelFineTuningJob(fineTuningJobId: String): Uri = fineTuningJob(fineTuningJobId).addPath("cancel")
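
For illustration (assuming the default base URI https://api.openai.com/v1; the job ID is hypothetical), these helpers resolve to:

fineTuningJob("ftjob-abc123")            // .../fine_tuning/jobs/ftjob-abc123
fineTuningJobEvents("ftjob-abc123")      // .../fine_tuning/jobs/ftjob-abc123/events
fineTuningJobCheckpoints("ftjob-abc123") // .../fine_tuning/jobs/ftjob-abc123/checkpoints
cancelFineTuningJob("ftjob-abc123")      // .../fine_tuning/jobs/ftjob-abc123/cancel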

def file(fileId: String): Uri = Files.addPath(fileId)
def fileContent(fileId: String): Uri = Files.addPath(fileId, "content")
def model(modelId: String): Uri = Models.addPath(modelId)
67 changes: 67 additions & 0 deletions core/src/main/scala/sttp/openai/OpenAISyncClient.scala
@@ -16,6 +16,8 @@ import sttp.openai.requests.completions.chat.ChatRequestResponseData.ChatRespons
import sttp.openai.requests.embeddings.EmbeddingsRequestBody.EmbeddingsBody
import sttp.openai.requests.embeddings.EmbeddingsResponseBody.EmbeddingResponse
import sttp.openai.requests.files.FilesResponseData.{DeletedFileData, FileData, FilesResponse}
import sttp.openai.requests.finetuning
import sttp.openai.requests.finetuning._
import sttp.openai.requests.images.ImageResponseData.ImageResponse
import sttp.openai.requests.images.creation.ImageCreationRequestBody.ImageCreationBody
import sttp.openai.requests.images.edit.ImageEditsConfig
@@ -348,6 +350,71 @@ class OpenAISyncClient private (
def createTranscription(transcriptionConfig: TranscriptionConfig): AudioResponse =
sendOrThrow(openAI.createTranscription(transcriptionConfig))

/** Creates a fine-tuning job which begins the process of creating a new model from a given dataset.
*
* Response includes details of the enqueued job including job status and the name of the fine-tuned models once complete.
*
* [[https://platform.openai.com/docs/api-reference/fine-tuning/create]]
*
* @param fineTuningRequestBody
* Request body that will be used to create a fine-tuning job.
*/
def createFineTuningJob(fineTuningRequestBody: FineTuningJobRequestBody): FineTuningJobResponse =
sendOrThrow(openAI.createFineTuningJob(fineTuningRequestBody))

/** List your organization's fine-tuning jobs.
*
* [[https://platform.openai.com/docs/api-reference/fine-tuning/list]]
*/
def listFineTuningJobs(queryParameters: finetuning.QueryParameters = finetuning.QueryParameters.empty): ListFineTuningJobResponse =
sendOrThrow(openAI.listFineTuningJobs(queryParameters))

/** Get status updates for a fine-tuning job.
*
* [[https://platform.openai.com/docs/api-reference/fine-tuning/list-events]]
*
* @param fineTuningJobId
* The ID of the fine-tuning job to get events for.
*/
def listFineTuningJobEvents(
fineTuningJobId: String,
queryParameters: finetuning.QueryParameters = finetuning.QueryParameters.empty
): ListFineTuningJobEventResponse =
sendOrThrow(openAI.listFineTuningJobEvents(fineTuningJobId, queryParameters))

/** List checkpoints for a fine-tuning job.
*
* [[https://platform.openai.com/docs/api-reference/fine-tuning/list-checkpoints]]
*
* @param fineTuningJobId
* The ID of the fine-tuning job to get checkpoints for.
*/
def listFineTuningJobCheckpoints(
fineTuningJobId: String,
queryParameters: finetuning.QueryParameters = finetuning.QueryParameters.empty
): ListFineTuningJobCheckpointResponse =
sendOrThrow(openAI.listFineTuningJobCheckpoints(fineTuningJobId, queryParameters))

/** Get info about a fine-tuning job.
*
* [[https://platform.openai.com/docs/api-reference/fine-tuning/retrieve]]
*
* @param fineTuningJobId
* The ID of the fine-tuning job.
*/
def retrieveFineTuningJob(fineTuningJobId: String): FineTuningJobResponse =
sendOrThrow(openAI.retrieveFineTuningJob(fineTuningJobId))

/** Immediately cancel a fine-tune job.
*
* [[https://platform.openai.com/docs/api-reference/fine-tuning/cancel]]
*
* @param fineTuningJobId
* The ID of the fine-tuning job to cancel.
*/
def cancelFineTuningJob(fineTuningJobId: String): FineTuningJobResponse =
sendOrThrow(openAI.cancelFineTuningJob(fineTuningJobId))
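
End-to-end sketch with the synchronous client (reviewer illustration, not part of the diff; assumes the OpenAISyncClient companion apply and that FineTuningJobResponse exposes the job id; the key and file ID are hypothetical):

import sttp.openai.OpenAISyncClient
import sttp.openai.requests.finetuning.{FineTuningJobRequestBody, FineTuningModel}

val client = OpenAISyncClient("your-api-key")

// Create the job, then poll its status and cancel it; failures surface as OpenAIException.
val created = client.createFineTuningJob(
  FineTuningJobRequestBody(
    model = FineTuningModel.GPT35Turbo0125,
    trainingFile = "file-abc123" // hypothetical ID of an uploaded JSONL file
  )
)
val status    = client.retrieveFineTuningJob(created.id)
val cancelled = client.cancelFineTuningJob(created.id)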

/** Gets info about the fine-tune job.
*
* [[https://platform.openai.com/docs/api-reference/embeddings/create]]
@@ -0,0 +1,78 @@
package sttp.openai.requests.finetuning

import sttp.openai.OpenAIExceptions.OpenAIException.DeserializationOpenAIException
import sttp.openai.json.SnakePickle
import ujson.Str

/** @param model
* The name of the model to fine-tune. You can select one of the supported models
* [[https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned]].
* @param trainingFile
* The ID of an uploaded file that contains training data. See upload file for how to upload a file. Your dataset must be formatted as a
* JSONL file. Additionally, you must upload your file with the purpose fine-tune. The contents of the file should differ depending on if
* the model uses the chat, completions format, or if the fine-tuning method uses the preference format. See the fine-tuning guide for
* more details.
* @param suffix
* A string of up to 64 characters that will be added to your fine-tuned model name. For example, a suffix of "custom-model-name" would
* produce a model name like ft:gpt-4o-mini:openai:custom-model-name:7p4lURel.
* @param validationFile
* The ID of an uploaded file that contains validation data. If you provide this file, the data is used to generate validation metrics
* periodically during fine-tuning. These metrics can be viewed in the fine-tuning results file. The same data should not be present in
* both train and validation files. Your dataset must be formatted as a JSONL file. You must upload your file with the purpose fine-tune.
* See the fine-tuning guide for more details.
* @param integrations
* A list of integrations to enable for your fine-tuning job.
* @param seed
* The seed controls the reproducibility of the job. Passing in the same seed and job parameters should produce the same results, but may
* differ in rare cases. If a seed is not specified, one will be generated for you.
* @param method
* The method used for fine-tuning.
*/
case class FineTuningJobRequestBody(
model: FineTuningModel,
trainingFile: String,
suffix: Option[String] = None,
validationFile: Option[String] = None,
integrations: Option[Seq[Integration]] = None,
seed: Option[Int] = None,
method: Option[Method] = None
)
object FineTuningJobRequestBody {
implicit val fineTuningRequestBodyWriter: SnakePickle.Writer[FineTuningJobRequestBody] = SnakePickle.macroW[FineTuningJobRequestBody]
}
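
A quick serialization sketch (reviewer illustration; assumes SnakePickle exposes upickle's standard write): field names are converted to snake_case, e.g. trainingFile becomes "training_file" in the request JSON.

import sttp.openai.json.SnakePickle

val json: String = SnakePickle.write(
  FineTuningJobRequestBody(
    model = FineTuningModel.GPT4oMini20240718,
    trainingFile = "file-abc123" // hypothetical ID of an uploaded JSONL file
  )
)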

sealed abstract class FineTuningModel(val value: String)

object FineTuningModel {

implicit val fineTuningModelRW: SnakePickle.ReadWriter[FineTuningModel] = SnakePickle
.readwriter[ujson.Value]
.bimap[FineTuningModel](
model => SnakePickle.writeJs(model.value),
jsonValue =>
SnakePickle.read[ujson.Value](jsonValue) match {
case Str(value) =>
byFineTuningModelValue.getOrElse(value, CustomFineTuningModel(value))
case e => throw DeserializationOpenAIException(new Exception(s"Could not deserialize: $e"))
}
)

case object GPT4o20240806 extends FineTuningModel("gpt-4o-2024-08-06")

case object GPT4oMini20240718 extends FineTuningModel("gpt-4o-mini-2024-07-18")

case object GPT40613 extends FineTuningModel("gpt-4-0613")

case object GPT35Turbo0125 extends FineTuningModel("gpt-3.5-turbo-0125")

case object GPT35Turbo1106 extends FineTuningModel("gpt-3.5-turbo-1106")

case object GPT35Turbo0613 extends FineTuningModel("gpt-3.5-turbo-0613")

case class CustomFineTuningModel(customFineTuningModel: String) extends FineTuningModel(customFineTuningModel)

val values: Set[FineTuningModel] = Set(GPT4o20240806, GPT4oMini20240718, GPT40613, GPT35Turbo0125, GPT35Turbo1106, GPT35Turbo0613)

private val byFineTuningModelValue = values.map(model => model.value -> model).toMap

}
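
Read-side sketch (reviewer illustration; assumes SnakePickle exposes upickle's standard read): known strings resolve to the predefined case objects, while anything else is preserved through the CustomFineTuningModel fallback, so newly released models can be used without a library update.

// Known value resolves to the predefined case object.
SnakePickle.read[FineTuningModel](ujson.Str("gpt-4o-2024-08-06"))  // FineTuningModel.GPT4o20240806

// Unknown value (hypothetical name) is kept via the fallback.
SnakePickle.read[FineTuningModel](ujson.Str("some-future-model"))  // CustomFineTuningModel("some-future-model")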