fix: better output for cortex cli
vansangpfiev committed Aug 30, 2024
1 parent 670a477 commit 6980a95
Showing 21 changed files with 118 additions and 65 deletions.
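Note: every hunk below swaps trantor's raw LOG_INFO / LOG_WARN / LOG_ERROR streams for CTLOG_* and CLI_LOG macros from a new engine/utils/logging_utils.h, which is among the 21 changed files but is not expanded in this view. As a minimal sketch of the intent only — the macro names come from the diff, while the implementation, the std::cout fallback, and the exact behaviour of the log_verbose flag that main.cc references are assumptions — the header could look roughly like this:

// Hypothetical sketch of engine/utils/logging_utils.h — not the actual file.
// Assumed behaviour: CLI_LOG always prints to the console (user-facing CLI output);
// CTLOG_INFO/CTLOG_WARN go to the trantor logger only when log_verbose is set;
// CTLOG_ERROR prints to the console in normal CLI use, or to the logger when verbose.
#pragma once

#include <iostream>

#include "trantor/utils/Logger.h"

inline bool log_verbose = false;

#define CTLOG_INFO(msg)   \
  do {                    \
    if (log_verbose) {    \
      LOG_INFO << msg;    \
    }                     \
  } while (false)

#define CTLOG_WARN(msg)   \
  do {                    \
    if (log_verbose) {    \
      LOG_WARN << msg;    \
    }                     \
  } while (false)

#define CTLOG_ERROR(msg)             \
  do {                               \
    if (log_verbose) {               \
      LOG_ERROR << msg;              \
    } else {                         \
      std::cout << msg << std::endl; \
    }                                \
  } while (false)

#define CLI_LOG(msg)                 \
  do {                               \
    std::cout << msg << std::endl;   \
  } while (false)

With macros along these lines, CLI_LOG carries user-facing messages such as "Model loaded!", while CTLOG_INFO/CTLOG_WARN detail stays out of the CLI output unless verbose logging is enabled.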
5 changes: 3 additions & 2 deletions engine/commands/chat_cmd.cc
@@ -2,6 +2,7 @@
#include "httplib.h"

#include "trantor/utils/Logger.h"
#include "utils/logging_utils.h"

namespace commands {
namespace {
@@ -48,12 +49,12 @@ void ChatCmd::Exec(std::string msg) {
data_str.data(), data_str.size(), "application/json");
if (res) {
if (res->status != httplib::StatusCode::OK_200) {
LOG_INFO << res->body;
CTLOG_ERROR(res->body);
return;
}
} else {
auto err = res.error();
LOG_WARN << "HTTP error: " << httplib::to_string(err);
CTLOG_ERROR("HTTP error: " << httplib::to_string(err));
return;
}
}
5 changes: 3 additions & 2 deletions engine/commands/cmd_info.cc
@@ -1,6 +1,7 @@
#include "cmd_info.h"
#include <vector>
#include "trantor/utils/Logger.h"
#include "utils/logging_utils.h"

namespace commands {
namespace {
@@ -33,7 +34,7 @@ void CmdInfo::Parse(std::string model_id) {
} else {
auto res = split(model_id, kDelimiter);
if (res.size() != 2) {
LOG_ERROR << "model_id does not valid";
CTLOG_ERROR("<model_id> does not valid");
return;
} else {
model_name = std::move(res[0]);
@@ -45,7 +46,7 @@ void CmdInfo::Parse(std::string model_id) {
} else if (branch.find("gguf") != std::string::npos) {
engine_name = "cortex.llamacpp";
} else {
LOG_ERROR << "Not a valid branch model_name " << branch;
CTLOG_ERROR("Not a valid branch model_name " << branch);
}
}
}
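For context, CmdInfo::Parse (above) splits a model handle into a model name and a branch, then infers the engine from the branch. An illustrative walk-through, assuming kDelimiter is ":" (the constant is defined outside the hunks shown here):

// Illustrative only — assumes kDelimiter is ":":
//   "tinyllama:1b-gguf" -> model_name = "tinyllama", branch = "1b-gguf"
//                          branch contains "gguf"    -> engine_name = "cortex.llamacpp"
//   "tinyllama:1b-foo"  -> branch matches no known engine
//                          -> "Not a valid branch model_name 1b-foo"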
25 changes: 13 additions & 12 deletions engine/commands/engine_init_cmd.cc
@@ -29,7 +29,7 @@ bool EngineInitCmd::Exec() const {
<< system_info.arch;
return false;
}
LOG_INFO << "OS: " << system_info.os << ", Arch: " << system_info.arch;
CTLOG_INFO("OS: " << system_info.os << ", Arch: " << system_info.arch);

// check if engine is supported
if (std::find(supportedEngines_.begin(), supportedEngines_.end(),
@@ -43,7 +43,7 @@ bool EngineInitCmd::Exec() const {
std::ostringstream engineReleasePath;
engineReleasePath << "/repos/janhq/" << engineName_ << "/releases/"
<< version;
LOG_INFO << "Engine release path: " << gitHubHost << engineReleasePath.str();
CTLOG_INFO("Engine release path: " << gitHubHost << engineReleasePath.str());
using namespace nlohmann;

httplib::Client cli(gitHubHost);
@@ -61,8 +61,8 @@ bool EngineInitCmd::Exec() const {
}

auto cuda_version = system_info_utils::GetCudaVersion();
LOG_INFO << "engineName_: " << engineName_;
LOG_INFO << "CUDA version: " << cuda_version;
CTLOG_INFO("engineName_: " << engineName_);
CTLOG_INFO("CUDA version: " << cuda_version);
std::string matched_variant = "";
if (engineName_ == "cortex.tensorrt-llm") {
matched_variant = engine_matcher_utils::ValidateTensorrtLlm(
@@ -76,7 +76,7 @@ bool EngineInitCmd::Exec() const {
variants, system_info.os, system_info.arch, suitable_avx,
cuda_version);
}
LOG_INFO << "Matched variant: " << matched_variant;
CTLOG_INFO("Matched variant: " << matched_variant);
if (matched_variant.empty()) {
LOG_ERROR << "No variant found for " << os_arch;
return false;
@@ -91,7 +91,7 @@ bool EngineInitCmd::Exec() const {
std::string path = full_url.substr(host.length());

auto fileName = asset["name"].get<std::string>();
LOG_INFO << "URL: " << full_url;
CTLOG_INFO("URL: " << full_url);

auto downloadTask = DownloadTask{.id = engineName_,
.type = DownloadType::Engine,
@@ -110,8 +110,8 @@ bool EngineInitCmd::Exec() const {
bool unused) {
// try to unzip the downloaded file
std::filesystem::path downloadedEnginePath{absolute_path};
LOG_INFO << "Downloaded engine path: "
<< downloadedEnginePath.string();
CTLOG_INFO("Downloaded engine path: "
<< downloadedEnginePath.string());

archive_utils::ExtractArchive(
downloadedEnginePath.string(),
@@ -123,9 +123,9 @@ bool EngineInitCmd::Exec() const {
try {
std::filesystem::remove(absolute_path);
} catch (const std::exception& e) {
LOG_ERROR << "Could not delete file: " << e.what();
CTLOG_WARN("Could not delete file: " << e.what());
}
LOG_INFO << "Finished!";
CTLOG_INFO("Finished!");
});
if (system_info.os == "mac" || engineName_ == "cortex.onnx") {
return false;
@@ -135,12 +135,13 @@ bool EngineInitCmd::Exec() const {
const std::string cuda_toolkit_file_name = "cuda.tar.gz";
const std::string download_id = "cuda";

auto gpu_driver_version = system_info_utils::GetDriverVersion();
auto gpu_driver_version = system_info_utils::GetDriverVersion();
if(gpu_driver_version.empty()) return true;

auto cuda_runtime_version =
cuda_toolkit_utils::GetCompatibleCudaToolkitVersion(
gpu_driver_version, system_info.os, engineName_);

LOG_INFO << "abc";
std::ostringstream cuda_toolkit_path;
cuda_toolkit_path << "dist/cuda-dependencies/" << 11.7 << "/"
<< system_info.os << "/"
16 changes: 13 additions & 3 deletions engine/commands/model_get_cmd.cc
@@ -2,9 +2,11 @@
#include <filesystem>
#include <iostream>
#include <vector>
#include "cmd_info.h"
#include "config/yaml_config.h"
#include "trantor/utils/Logger.h"
#include "utils/cortex_utils.h"
#include "utils/logging_utils.h"

namespace commands {

@@ -14,12 +16,15 @@ ModelGetCmd::ModelGetCmd(std::string model_handle)
void ModelGetCmd::Exec() {
if (std::filesystem::exists(cortex_utils::models_folder) &&
std::filesystem::is_directory(cortex_utils::models_folder)) {
CmdInfo ci(model_handle_);
std::string model_file =
ci.branch == "main" ? ci.model_name : ci.model_name + "-" + ci.branch;
bool found_model = false;
// Iterate through directory
for (const auto& entry :
std::filesystem::directory_iterator(cortex_utils::models_folder)) {

if (entry.is_regular_file() && entry.path().stem() == model_handle_ &&
if (entry.is_regular_file() && entry.path().stem() == model_file &&
entry.path().extension() == ".yaml") {
try {
config::YamlHandler handler;
@@ -131,11 +136,16 @@ void ModelGetCmd::Exec() {
found_model = true;
break;
} catch (const std::exception& e) {
LOG_ERROR << "Error reading yaml file '" << entry.path().string()
<< "': " << e.what();
CTLOG_ERROR("Error reading yaml file '" << entry.path().string()
<< "': " << e.what());
}
}
}
if (!found_model) {
CLI_LOG("Model not found!");
}
} else {
CLI_LOG("Model not found!");
}
}
}; // namespace commands
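The behavioural change in ModelGetCmd: the raw handle is no longer matched against the YAML file stem; it is first resolved through CmdInfo, and a "Model not found!" message is printed when nothing matches. Roughly (illustrative handles and file names only):

// Illustrative resolution of a handle to the YAML file searched for in the models folder:
//   "tinyllama:1b-gguf" -> branch "1b-gguf" -> <models_folder>/tinyllama-1b-gguf.yaml
//   "tinyllama:main"    -> branch "main"    -> <models_folder>/tinyllama.yaml
// Previously the command exited silently when no file matched; now it reports "Model not found!".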
5 changes: 3 additions & 2 deletions engine/commands/model_list_cmd.cc
@@ -8,6 +8,7 @@
#include <vector>
#include "config/yaml_config.h"
#include "trantor/utils/Logger.h"
#include "utils/logging_utils.h"
namespace commands {

void ModelListCmd::Exec() {
@@ -30,8 +31,8 @@ void ModelListCmd::Exec() {
table.add_row({std::to_string(count), model_config.id,
model_config.engine, model_config.version});
} catch (const std::exception& e) {
LOG_ERROR << "Error reading yaml file '" << entry.path().string()
<< "': " << e.what();
CTLOG_ERROR("Error reading yaml file '" << entry.path().string()
<< "': " << e.what());
}
}
}
5 changes: 3 additions & 2 deletions engine/commands/model_pull_cmd.cc
@@ -4,6 +4,7 @@
#include "trantor/utils/Logger.h"
#include "utils/cortexso_parser.h"
#include "utils/model_callback_utils.h"
#include "utils/logging_utils.h"

namespace commands {
ModelPullCmd::ModelPullCmd(std::string model_handle, std::string branch)
@@ -15,10 +16,10 @@ bool ModelPullCmd::Exec() {
DownloadService downloadService;
downloadService.AddDownloadTask(downloadTask.value(),
model_callback_utils::DownloadModelCb);
std::cout << "Download finished" << std::endl;
CTLOG_INFO("Download finished");
return true;
} else {
std::cout << "Model not found" << std::endl;
CTLOG_ERROR("Model not found");
return false;
}
}
5 changes: 3 additions & 2 deletions engine/commands/model_start_cmd.cc
@@ -2,6 +2,7 @@
#include "httplib.h"
#include "nlohmann/json.hpp"
#include "trantor/utils/Logger.h"
#include "utils/logging_utils.h"

namespace commands {
ModelStartCmd::ModelStartCmd(std::string host, int port,
@@ -32,11 +33,11 @@ bool ModelStartCmd::Exec() {
data_str.data(), data_str.size(), "application/json");
if (res) {
if (res->status == httplib::StatusCode::OK_200) {
LOG_INFO << res->body;
CLI_LOG("Model loaded!");
}
} else {
auto err = res.error();
LOG_WARN << "HTTP error: " << httplib::to_string(err);
CTLOG_ERROR("HTTP error: " << httplib::to_string(err));
return false;
}
return true;
engine/commands/model_stop_cmd.cc (renamed from stop_model_cmd.cc)
@@ -1,14 +1,15 @@
#include "stop_model_cmd.h"
#include "model_stop_cmd.h"
#include "httplib.h"
#include "nlohmann/json.hpp"
#include "trantor/utils/Logger.h"
#include "utils/logging_utils.h"

namespace commands {
StopModelCmd::StopModelCmd(std::string host, int port,
ModelStopCmd::ModelStopCmd(std::string host, int port,
const config::ModelConfig& mc)
: host_(std::move(host)), port_(port), mc_(mc) {}

void StopModelCmd::Exec() {
void ModelStopCmd::Exec() {
httplib::Client cli(host_ + ":" + std::to_string(port_));
nlohmann::json json_data;
json_data["model"] = mc_.name;
@@ -20,11 +21,12 @@ void StopModelCmd::Exec() {
data_str.data(), data_str.size(), "application/json");
if (res) {
if (res->status == httplib::StatusCode::OK_200) {
LOG_INFO << res->body;
// LOG_INFO << res->body;
CLI_LOG("Model unloaded!");
}
} else {
auto err = res.error();
LOG_WARN << "HTTP error: " << httplib::to_string(err);
CTLOG_ERROR("HTTP error: " << httplib::to_string(err));
}
}

engine/commands/model_stop_cmd.h (renamed from stop_model_cmd.h)
@@ -5,9 +5,9 @@

namespace commands {

class StopModelCmd{
class ModelStopCmd{
public:
StopModelCmd(std::string host, int port, const config::ModelConfig& mc);
ModelStopCmd(std::string host, int port, const config::ModelConfig& mc);
void Exec();

private:
4 changes: 3 additions & 1 deletion engine/commands/run_cmd.cc
@@ -34,8 +34,10 @@ void RunCmd::Exec() {
{
if (!IsEngineExisted(ci.engine_name)) {
EngineInitCmd eic(ci.engine_name, "");
if (!eic.Exec())
if (!eic.Exec()) {
LOG_INFO << "Failed to install engine";
return;
}
}
}

engine/commands/server_stop_cmd.cc (renamed from stop_server_cmd.cc)
@@ -1,19 +1,20 @@
#include "stop_server_cmd.h"
#include "server_stop_cmd.h"
#include "httplib.h"
#include "trantor/utils/Logger.h"
#include "utils/logging_utils.h"

namespace commands {
StopServerCmd::StopServerCmd(std::string host, int port)
ServerStopCmd::ServerStopCmd(std::string host, int port)
: host_(std::move(host)), port_(port) {}

void StopServerCmd::Exec() {
void ServerStopCmd::Exec() {
httplib::Client cli(host_ + ":" + std::to_string(port_));
auto res = cli.Delete("/processManager/destroy");
if (res) {
LOG_INFO << res->body;
CLI_LOG("Server stopped!");
} else {
auto err = res.error();
LOG_WARN << "HTTP error: " << httplib::to_string(err);
CTLOG_ERROR("HTTP error: " << httplib::to_string(err));
}
}

engine/commands/server_stop_cmd.h (renamed from stop_server_cmd.h)
@@ -3,9 +3,9 @@

namespace commands {

class StopServerCmd{
class ServerStopCmd{
public:
StopServerCmd(std::string host, int port);
ServerStopCmd(std::string host, int port);
void Exec();

private:
8 changes: 4 additions & 4 deletions engine/controllers/command_line_parser.cc
@@ -8,8 +8,8 @@
#include "commands/model_pull_cmd.h"
#include "commands/model_start_cmd.h"
#include "commands/run_cmd.h"
#include "commands/stop_model_cmd.h"
#include "commands/stop_server_cmd.h"
#include "commands/model_stop_cmd.h"
#include "commands/server_stop_cmd.h"
#include "config/yaml_config.h"
#include "utils/cortex_utils.h"

@@ -47,7 +47,7 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
config::YamlHandler yaml_handler;
yaml_handler.ModelConfigFromFile(cortex_utils::GetCurrentPath() +
"/models/" + model_file + ".yaml");
commands::StopModelCmd smc("127.0.0.1", 3928,
commands::ModelStopCmd smc("127.0.0.1", 3928,
yaml_handler.GetModelConfig());
smc.Exec();
});
@@ -146,7 +146,7 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {

stop_cmd->callback([] {
// TODO get info from config file
commands::StopServerCmd ssc("127.0.0.1", 3928);
commands::ServerStopCmd ssc("127.0.0.1", 3928);
ssc.Exec();
});

3 changes: 3 additions & 0 deletions engine/main.cc
@@ -6,6 +6,7 @@
#include "utils/archive_utils.h"
#include "utils/cortex_utils.h"
#include "utils/dylib.h"
#include "utils/logging_utils.h"

#if defined(__APPLE__) && defined(__MACH__)
#include <libgen.h> // for dirname()
@@ -22,6 +23,7 @@
#error "Unsupported platform!"
#endif


void RunServer() {
// Create logs/ folder and setup log to file
std::filesystem::create_directory(cortex_utils::logs_folder);
@@ -120,6 +122,7 @@ void ForkProcess() {
}

int main(int argc, char* argv[]) {
// log_verbose = true;
// Check if this process is for python execution
if (argc > 1) {
if (strcmp(argv[1], "--run_python_file") == 0) {
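main.cc only includes the new header and leaves log_verbose = true; commented out, so verbose CTLOG_* output stays off by default. A hypothetical way to expose it as a flag later — not part of this commit, and assuming log_verbose is the global declared in utils/logging_utils.h — could look like this:

// Hypothetical sketch — not in this commit.
#include <cstring>

#include "utils/logging_utils.h"

static void ParseVerboseFlag(int argc, char* argv[]) {
  for (int i = 1; i < argc; ++i) {
    if (std::strcmp(argv[i], "--verbose") == 0) {
      log_verbose = true;  // let CTLOG_* messages through to the trantor logger
    }
  }
}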