Skip to content

Commit

Permalink
FineTuneJob - fix the Either types
Browse files Browse the repository at this point in the history
  • Loading branch information
peterbanda committed Nov 24, 2023
1 parent dfd6a99 commit 734d5d0
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -47,10 +47,10 @@ case class FineTuneEvent(
case class FineTuneHyperparams(
// Number of examples in each batch or "auto".
// A larger batch size means that model parameters are updated less frequently, but with lower variance.
batch_size: Option[Either[String, Int]],
batch_size: Option[Either[Int, String]],

// Scaling factor for the learning rate or "auto". A smaller learning rate may be useful to avoid overfitting.
learning_rate_multiplier: Option[Either[String, Int]],
learning_rate_multiplier: Option[Either[Int, String]],

// the number of epochs or "auto" (if not specified initially)
// "auto" decides the optimal number of epochs based on the size of the dataset.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,11 @@ case class CreateFineTuneSettings(
// Number of examples in each batch or "auto".
// A larger batch size means that model parameters are updated less frequently, but with lower variance.
// Defaults to auto
batch_size: Option[Int],
batch_size: Option[Int] = None,

// Scaling factor for the learning rate or "auto". A smaller learning rate may be useful to avoid overfitting.
// Defaults to auto
learning_rate_multiplier: Option[Int],
learning_rate_multiplier: Option[Int] = None,

// The number of epochs to train the model for.
// An epoch refers to one full cycle through the training dataset.
Expand Down

0 comments on commit 734d5d0

Please sign in to comment.