Merge pull request #1527 from janhq/j/remove-chat-cmd
fix: remove chat command
namchuai authored Oct 22, 2024
2 parents 3f9680c + 52014fb commit baf75e3
Showing 1 changed file with 0 additions and 32 deletions.
engine/cli/command_line_parser.cc (0 additions, 32 deletions)
@@ -1,8 +1,6 @@
#include "command_line_parser.h"
#include <memory>
#include <optional>
#include "commands/chat_cmd.h"
#include "commands/chat_completion_cmd.h"
#include "commands/cortex_upd_cmd.h"
#include "commands/engine_get_cmd.h"
#include "commands/engine_install_cmd.h"
@@ -152,36 +150,6 @@ void CommandLineParser::SetupCommonCommands() {
                         cml_data_.model_id, download_service_);
     rc.Exec(cml_data_.run_detach);
   });
-
-  auto chat_cmd = app_.add_subcommand(
-      "chat",
-      "Shortcut for `cortex run --chat` or send a chat completion request");
-  chat_cmd->group(kCommonCommandsGroup);
-  chat_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
-                  " chat [model_id] -m [msg]");
-  chat_cmd->add_option("model_id", cml_data_.model_id, "");
-  chat_cmd->add_option("-m,--message", cml_data_.msg,
-                       "Message to chat with model");
-  chat_cmd->callback([this, chat_cmd] {
-    if (std::exchange(executed_, true))
-      return;
-    if (cml_data_.model_id.empty()) {
-      CLI_LOG("[model_id] is required\n");
-      CLI_LOG(chat_cmd->help());
-      return;
-    }
-
-    if (cml_data_.msg.empty()) {
-      commands::ChatCmd().Exec(cml_data_.config.apiServerHost,
-                               std::stoi(cml_data_.config.apiServerPort),
-                               cml_data_.model_id, download_service_);
-    } else {
-      commands::ChatCompletionCmd(model_service_)
-          .Exec(cml_data_.config.apiServerHost,
-                std::stoi(cml_data_.config.apiServerPort), cml_data_.model_id,
-                cml_data_.msg);
-    }
-  });
 }
 
 void CommandLineParser::SetupInferenceCommands() {
