From 4590e406bd021c3ac9bb2080beea55361df670d1 Mon Sep 17 00:00:00 2001
From: Carson Lam
Date: Fri, 15 Sep 2023 12:10:37 -0700
Subject: [PATCH] Models.ready docs

---
 README.md              | 12 +++++++++++-
 src/together/models.py |  4 ++--
 2 files changed, 13 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 7b15b50c..7d175d27 100644
--- a/README.md
+++ b/README.md
@@ -315,7 +315,7 @@ The `get_job_status` should change from `pending` to `running` to `completed` as
 
 `carlton/ft-dd93c727-f35e-41c2-a370-7d55b54128fa-2023-08-16-10-15-09`
 
-Now you can download your model using `together.Finetune.download(fine_tune_id)` or you start using your model for inference (may take a few minutes after finetuning to become available) by first starting your new model instance:
+Now you can download your model using `together.Finetune.download(fine_tune_id)` or start using your model on our inference engine (it may take a few minutes after finetuning to become available) by first starting your new model instance:
 
 ```
 together.Models.start("carlton/ft-dd93c727-f35e-41c2-a370-7d55b54128fa-2023-08-16-10-15-09")
@@ -330,6 +330,16 @@ output = together.Complete.create(
 )
 ```
 
+To check whether your model is finished deploying, you can use `Models.ready` like so:
+
+```
+together.Models.ready("carlton/ft-dd93c727-f35e-41c2-a370-7d55b54128fa-2023-08-16-10-15-09")
+```
+
+```
+{'ready': 'model is ready for start, status code:1'}
+```
+
 ## Chat
 
 The `chat` command is a CLI-based chat application that can be used for back-and-forth conversations with models in a pre-defined format.
diff --git a/src/together/models.py b/src/together/models.py
index d62185e6..3ebd849d 100644
--- a/src/together/models.py
+++ b/src/together/models.py
@@ -152,9 +152,9 @@ def ready(self, model: str) -> Dict[str, str]:
             if model_dict.get("name") == model:
                 depth_num_asks = model_dict["depth"]["num_asks"]
                 if depth_num_asks > 0:
-                    return {"ready":"model is ready for start, status code:"+depth_num_asks}
+                    return {"ready":f"model is ready for start, status code:{depth_num_asks}"}
                 else:
-                    return {"ready":"model is not ready for start, status code:"+depth_num_asks}
+                    return {"ready":f"model is not ready for start, status code:{depth_num_asks}"}
             else:
                 return {"ready":f"No matching model name found for '{model}'."}
         else:
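
As a quick illustration of how the `Models.ready` check documented above can fit into a workflow, here is a minimal sketch: it starts the example fine-tuned model from the README, then polls `Models.ready` until the returned message reports that the model is ready. The model name is the README's placeholder, and both the match on the message text and the 10-second poll interval are assumptions for illustration rather than a stable contract; it also assumes your Together API key is already configured.

```
import time

import together

# Placeholder fine-tuned model name from the README example above; replace with your own.
model = "carlton/ft-dd93c727-f35e-41c2-a370-7d55b54128fa-2023-08-16-10-15-09"

# Ask the inference engine to deploy an instance of the model.
together.Models.start(model)

# Poll Models.ready until the returned dict reports readiness, e.g.
# {'ready': 'model is ready for start, status code:1'}. Matching on the
# message text is an illustration-only assumption, not a stable contract.
while "is ready" not in together.Models.ready(model).get("ready", ""):
    time.sleep(10)

print("Model is deployed and ready for inference requests.")
```

A real script would likely also put a timeout around the polling loop so it cannot spin forever if the deployment fails.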