From 38e6e5f648ce0a78db56e9bac1f687ee00cde3a0 Mon Sep 17 00:00:00 2001
From: Artem Chumachenko
Date: Thu, 8 Aug 2024 22:34:49 +0200
Subject: [PATCH] Fix default value for the LoRA flag

---
 pyproject.toml                     | 2 +-
 src/together/resources/finetune.py | 8 +++++++-
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index f2175990..93b94787 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -12,7 +12,7 @@ build-backend = "poetry.masonry.api"

 [tool.poetry]
 name = "together"
-version = "1.2.6"
+version = "1.2.7"
 authors = [
     "Together AI "
 ]

diff --git a/src/together/resources/finetune.py b/src/together/resources/finetune.py
index 4ad6c84f..ec092316 100644
--- a/src/together/resources/finetune.py
+++ b/src/together/resources/finetune.py
@@ -33,7 +33,7 @@ def create(
         n_checkpoints: int | None = 1,
         batch_size: int | None = 16,
         learning_rate: float | None = 0.00001,
-        lora: bool = True,
+        lora: bool = False,
         lora_r: int | None = 8,
         lora_dropout: float | None = 0,
         lora_alpha: float | None = 8,
@@ -108,6 +108,12 @@ def create(
             "The default value of batch size has been changed from 32 to 16 since together version >= 1.2.6"
         )

+        # TODO: Remove after next LoRA default change
+        log_warn(
+            "Some of the jobs submitted _directly_ through the together-python library might have been trained using LoRA adapters. "
+            "This default was in effect from version 1.2.3 to 1.2.6."
+        )
+
         return FinetuneResponse(**response.data)

     def list(self) -> FinetuneList:
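
Note: with this change, LoRA fine-tuning becomes opt-in rather than the default. A minimal sketch of the resulting call-site behavior, assuming the standard `Together` client entry point; the file ID and model name below are placeholders, not values from this patch:

    from together import Together

    client = Together()  # reads TOGETHER_API_KEY from the environment

    # After this patch, `lora` defaults to False, so a LoRA job must be
    # requested explicitly; omitting the flag now yields a full fine-tune.
    job = client.fine_tuning.create(
        training_file="file-abc123",         # placeholder training file ID
        model="meta-llama/Meta-Llama-3-8B",  # placeholder model name
        lora=True,                           # opt in to LoRA adapters
        lora_r=8,
        lora_alpha=8,
    )
    print(job.id)

Callers that relied on the implicit `lora=True` default introduced in 1.2.3 should pass the flag explicitly after upgrading to 1.2.7.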