
Commit 25ee71f
Fine-tuning endpoint changed to "/fine_tuning/jobs". Closes #42
peterbanda committed Sep 21, 2023
1 parent 23a3bc7 commit 25ee71f
Showing 5 changed files with 14 additions and 3 deletions.
@@ -16,7 +16,7 @@ object EndPoint extends Enumeration {
case object audio_transcriptions extends EndPoint("audio/transcriptions")
case object audio_translations extends EndPoint("audio/translations")
case object files extends EndPoint
- case object fine_tunes extends EndPoint("fine-tunes")
+ case object fine_tunes extends EndPoint("fine_tuning/jobs")
case object moderations extends EndPoint
}
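The functional change in this file is just the path string: `fine_tunes` now resolves to `fine_tuning/jobs` instead of the legacy `fine-tunes` route. A minimal sketch of how such an endpoint value typically ends up in a request URL; the `coreUrl` constant and `createUrl` helper here are illustrative assumptions, not the library's actual internals:

```scala
object EndPointUrlSketch {
  // Hypothetical stand-in for the library's EndPoint hierarchy.
  sealed abstract class EndPoint(val path: String)
  case object fine_tunes extends EndPoint("fine_tuning/jobs")

  // Assumed OpenAI API base; the real client presumably keeps this configurable.
  val coreUrl = "https://api.openai.com/v1/"

  // Build the full request URL from an endpoint plus an optional trailing path segment.
  def createUrl(endPoint: EndPoint, endPointParam: Option[String] = None): String =
    coreUrl + endPoint.path + endPointParam.map("/" + _).getOrElse("")

  def main(args: Array[String]): Unit = {
    println(createUrl(fine_tunes))                           // .../v1/fine_tuning/jobs
    println(createUrl(fine_tunes, Some("ftjob-abc/events"))) // .../v1/fine_tuning/jobs/ftjob-abc/events
  }
}
```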

@@ -327,7 +327,7 @@ private trait OpenAIServiceImpl extends OpenAICoreServiceImpl with OpenAIService
EndPoint.fine_tunes,
endPointParam = Some(s"$fineTuneId/events"),
params = Seq(
- Param.stream -> Some(false)
+ Param.stream -> Some(false) // TODO: is streaming still supported?
)
).map { response =>
handleNotFoundAndError(response).map(jsResponse =>
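For context on the `params = Seq(Param.stream -> Some(false))` pattern above: defined values presumably end up as query-string entries while `None` entries are dropped, so with the renamed endpoint this call now targets `GET /v1/fine_tuning/jobs/{fineTuneId}/events?stream=false`. A rough, self-contained sketch of that convention (the helper and its exact rendering are assumptions):

```scala
object QueryParamsSketch {
  // Render optional query parameters: keep defined values, drop None entries.
  def toQueryString(params: Seq[(String, Option[Any])]): String =
    params.collect { case (name, Some(value)) => s"$name=$value" }.mkString("&")

  def main(args: Array[String]): Unit = {
    val base  = "https://api.openai.com/v1/fine_tuning/jobs/ftjob-abc/events"
    val query = toQueryString(Seq("stream" -> Some(false), "after" -> None))
    println(if (query.isEmpty) base else s"$base?$query") // ...events?stream=false
  }
}
```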
@@ -2,6 +2,7 @@ package io.cequence.openaiscala.domain.response

import java.{util => ju}

+ // TODO: adapt
case class FineTuneJob(
id: String,
model: String,
@@ -10,10 +11,10 @@ case class FineTuneJob(
fine_tuned_model: Option[String],
hyperparams: FineTuneHyperparams,
organization_id: String,
- result_files: Seq[FileInfo],
status: String, // e.g. pending or cancelled
validation_files: Seq[FileInfo],
training_files: Seq[FileInfo],
+ result_files: Seq[FileInfo],
updated_at: ju.Date
)

@@ -23,6 +24,7 @@ case class FineTuneEvent(
message: String
)

+ // TODO: adapt
case class FineTuneHyperparams(
batch_size: Option[Int],
learning_rate_multiplier: Option[Double],
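Both `// TODO: adapt` markers flag that these response classes still mirror the legacy `/fine-tunes` payload (`FileInfo` lists, `updated_at`, prompt-era hyperparameters). In the `/fine_tuning/jobs` responses, as far as I recall, files come back as plain file IDs and the hyperparameters shrink to `n_epochs`; a hedged sketch of what the adapted classes might look like, with field names to be verified against the current OpenAI docs:

```scala
import java.{util => ju}

// Hypothetical adaptation sketch, not the library's final domain model.
case class FineTuneHyperparamsV2(
  n_epochs: Option[Int] // the API may report "auto" before the job starts
)

case class FineTuneJobV2(
  id: String,
  model: String,
  created_at: ju.Date,
  finished_at: Option[ju.Date],
  fine_tuned_model: Option[String],
  organization_id: String,
  status: String, // e.g. validating_files, queued, running, succeeded, failed, cancelled
  training_file: String,           // file ID rather than an embedded FileInfo
  validation_file: Option[String],
  result_files: Seq[String],
  trained_tokens: Option[Int],
  hyperparameters: FineTuneHyperparamsV2
)
```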
@@ -16,41 +16,48 @@ case class CreateFineTuneSettings(
// The batch size is the number of training examples used to train a single forward and backward pass.
// By default, the batch size will be dynamically configured to be ~0.2% of the number of examples in the training set,
// capped at 256 - in general, we've found that larger batch sizes tend to work better for larger datasets.
+ // TODO: is it still supported?
batch_size: Option[Int] = None,

// The learning rate multiplier to use for training.
// The fine-tuning learning rate is the original learning rate used for pretraining multiplied by this value.
// By default, the learning rate multiplier is 0.05, 0.1, or 0.2, depending on the final batch_size (larger learning rates tend to perform better with larger batch sizes).
// We recommend experimenting with values in the range 0.02 to 0.2 to see what produces the best results.
+ // TODO: is it still supported?
learning_rate_multiplier: Option[Double] = None,

// The weight to use for loss on the prompt tokens.
// This controls how much the model tries to learn to generate the prompt (as compared to the completion which always has a weight of 1.0),
// and can add a stabilizing effect to training when completions are short.
// If prompts are extremely long (relative to completions), it may make sense to reduce this weight so as to avoid over-prioritizing learning the prompt.
// Defaults to 0.01
+ // TODO: is it still supported?
prompt_loss_weight: Option[Double] = None,

// If set, we calculate classification-specific metrics such as accuracy and F-1 score using the validation set at the end of every epoch.
// These metrics can be viewed in the <a href="https://beta.openai.com/docs/guides/fine-tuning/analyzing-your-fine-tuned-model">results file</a>.
// In order to compute classification metrics, you must provide a validation_file.
// Additionally, you must specify classification_n_classes for multiclass classification or classification_positive_class for binary classification.
// Defaults to false
+ // TODO: is it still supported?
compute_classification_metrics: Option[Boolean] = None,

// The number of classes in a classification task.
// This parameter is required for multiclass classification.
+ // TODO: is it still supported?
classification_n_classes: Option[Int] = None,

// The positive class in binary classification.
// This parameter is needed to generate precision, recall, and F1 metrics when doing binary classification.
+ // TODO: is it still supported?
classification_positive_class: Option[String] = None,

// If this is provided, we calculate F-beta scores at the specified beta values.
// The F-beta score is a generalization of the F-1 score. This is only used for binary classification.
// With a beta of 1 (i.e. the F-1 score), precision and recall are given the same weight.
// A larger beta score puts more weight on recall and less on precision.
// A smaller beta score puts more weight on precision and less on recall.
+ // TODO: is it still supported?
classification_betas: Option[Seq[Double]] = None,

// A string of up to 40 characters that will be added to your fine-tuned model name.
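The repeated `// TODO: is it still supported?` markers all stem from the same issue: these knobs come from the legacy `/fine-tunes` API, while the initial `/fine_tuning/jobs` create request, as far as I recall, only accepts `model`, `training_file`, `validation_file`, a nested `hyperparameters.n_epochs`, and `suffix`. A hedged sketch of settings trimmed to that shape (the exact field set is an assumption to verify; file IDs are presumably still passed separately, as in the existing `createFineTune` call):

```scala
// Hypothetical trimmed-down settings for POST /v1/fine_tuning/jobs, not the library's final API.
case class CreateFineTuningJobSettingsSketch(
  model: String,                // base model to fine-tune, e.g. "gpt-3.5-turbo"
  n_epochs: Option[Int] = None, // serialized as hyperparameters.n_epochs; omitted => "auto"
  suffix: Option[String] = None // short string appended to the fine-tuned model name
)
```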
@@ -320,6 +320,7 @@ trait OpenAIService extends OpenAICoreService with OpenAIServiceConsts {
* @see
* <a href="https://platform.openai.com/docs/api-reference/fine-tunes/list">OpenAI Doc</a>
*/
+ // TODO: add pagination: after: Option[String], limit: Option[Int]
def listFineTunes: Future[Seq[FineTuneJob]]

/**
@@ -366,6 +367,7 @@ trait OpenAIService extends OpenAICoreService with OpenAIServiceConsts {
* <a href="https://platform.openai.com/docs/api-reference/fine-tunes/events">OpenAI
* Doc</a>
*/
+ // TODO: add pagination: after: Option[String], limit: Option[Int]
def listFineTuneEvents(
fineTuneId: String
): Future[Option[Seq[FineTuneEvent]]]
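Both pagination TODOs above point at the same change: the new list and list-events routes are cursor-paginated via `after` and `limit` query parameters. A hedged sketch of how the two signatures might evolve; the parameter names follow the OpenAI query parameters, but the final library API is an assumption:

```scala
import scala.concurrent.Future
import io.cequence.openaiscala.domain.response.{FineTuneEvent, FineTuneJob}

// Hypothetical paginated variants of the two listing methods.
trait FineTuningListings {
  def listFineTunes(
    after: Option[String] = None, // job ID to start the page after
    limit: Option[Int] = None     // page size (the API default is 20)
  ): Future[Seq[FineTuneJob]]

  def listFineTuneEvents(
    fineTuneId: String,
    after: Option[String] = None, // event ID to start the page after
    limit: Option[Int] = None
  ): Future[Option[Seq[FineTuneEvent]]]
}
```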
