lab: exit program after catching and reporting exceptions (NVIDIA#336)
* cli: use non-zero exit status for error scenarios

Invoking sys.exit() with no args results in an exit status
of zero, which traditionally indicates success. This is not
appropriate for error scenarios.

Signed-off-by: Daniel P. Berrangé <[email protected]>
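
A minimal sketch (not part of the patch) of the behavior the first change addresses: a bare sys.exit() reports status 0, which shells and CI systems read as success, while sys.exit(1) correctly signals failure.

import subprocess
import sys

# Run two throwaway interpreters and inspect the exit status each one reports.
bare = subprocess.run([sys.executable, "-c", "import sys; sys.exit()"], check=False)
explicit = subprocess.run([sys.executable, "-c", "import sys; sys.exit(1)"], check=False)

print(bare.returncode)      # 0 -- indistinguishable from success
print(explicit.returncode)  # 1 -- non-zero, flags the error to the caller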

* lab: exit program after catching and reporting exceptions

Creating the app failed with an exception raised due to a missing
model file. Execution carried on, however, leading to use of an
undefined 'app' variable. Fix this, and a couple of other places
which also caught (unrecoverable) exceptions and forgot to exit.

Signed-off-by: Daniel P. Berrangé <[email protected]>
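
A hypothetical reduction of the bug described above (the names are illustrative, not the actual cli/lab.py code): the except block reports the failure but does not exit, so control falls through to a line that references a variable that was never bound.

def create_app(model_path):
    # Stand-in for the real app factory; it fails when the model file is missing.
    raise ValueError(f"model file not found: {model_path}")

try:
    app = create_app("models/missing.gguf")
except ValueError as err:
    print(f"Creating App using model failed with following value error: {err}")
    # The patch adds sys.exit(1) at this point so execution stops here.

app.run()  # without the exit, this line raises NameError: name 'app' is not defined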

---------

Signed-off-by: Daniel P. Berrangé <[email protected]>
berrange authored Mar 7, 2024
1 parent f778cce commit b064d97
Showing 1 changed file with 9 additions and 3 deletions.
diff --git a/cli/lab.py b/cli/lab.py
--- a/cli/lab.py
+++ b/cli/lab.py
@@ -150,6 +150,7 @@ def init(ctx, interactive, model_path, taxonomy_path, repository, min_taxonomy):
             f"Cloning {repository} failed with the following error: {exc}",
             fg="red",
         )
+        sys.exit(1)
 
     # check if models dir exists, and if so ask for which model to use
     models_dir = dirname(model_path)
@@ -241,6 +242,7 @@ def serve(ctx, model_path, gpu_layers):
             f"Creating App using model failed with following value error: {err}",
             fg="red",
         )
+        sys.exit(1)
     try:
         llama_app._llama_proxy._current_model.chat_handler = llama_chat_format.Jinja2ChatFormatter(
             template="{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
@@ -253,6 +255,7 @@ def serve(ctx, model_path, gpu_layers):
             f"Error creating chat handler: {e}",
             fg="red",
         )
+        sys.exit(1)
     click.echo("Starting server process")
     click.echo(
         "After application startup complete see http://127.0.0.1:8000/docs for API."
@@ -334,6 +337,7 @@ def generate(
             f"Error connecting to the server: {exc.__cause__}",
             fg="red",
         )
+        sys.exit(1)
 
 
 @cli.command()
@@ -380,6 +384,7 @@ def chat(ctx, question, model, context, session, quick_question):
         )
     except ChatException as exc:
         click.secho(f"Executing chat failed with: {exc}", fg="red")
+        sys.exit(1)
 
 
 @cli.command()
@@ -420,6 +425,7 @@ def download(ctx, repository, release, filename, model_dir):
             f"Downloading model failed with the following Hugging Face Hub error: {exc}",
             fg="red",
         )
+        sys.exit(1)
 
 
 @cli.command()
@@ -482,19 +488,19 @@ def train(
             f"Could not read taxonomy directory: {exc}",
             fg="red",
         )
-        sys.exit()
+        sys.exit(1)
     except OSError as exc:
         click.secho(
             f"Could not create data dir: {exc}",
             fg="red",
         )
-        sys.exit()
+        sys.exit(1)
     except IndexError as exc:
         click.secho(
             f"Could not copy into data directory: {exc}",
             fg="red",
         )
-        sys.exit()
+        sys.exit(1)
 
     if not skip_preprocessing:
         script = os.path.join(cli_dir, "train/lora-mlx/make_data.py")
