Sync dev to main 1.0.3-rc4 #1708

Merged · 3 commits · Nov 19, 2024

Changes from all commits:
docs/sidebars.ts: 2 changes (1 addition, 1 deletion)

@@ -104,7 +104,7 @@ const sidebars: SidebarsConfig = {
 type: "doc",
 id: "configurations/token",
 label: "Token",
-}
+},
 ],
 },
 {

engine/cli/command_line_parser.cc: 5 changes (3 additions, 2 deletions)

@@ -602,8 +602,9 @@ void CommandLineParser::SetupSystemCommands() {
 << " to " << cml_data_.port);
 auto config_path = file_manager_utils::GetConfigurationPath();
 cml_data_.config.apiServerPort = std::to_string(cml_data_.port);
-auto result = config_yaml_utils::DumpYamlConfig(cml_data_.config,
-config_path.string());
+auto result =
+config_yaml_utils::CortexConfigMgr::GetInstance().DumpYamlConfig(
+cml_data_.config, config_path.string());
 if (result.has_error()) {
 CLI_LOG("Error update " << config_path.string() << result.error());
 }

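This hunk, and several below, replace the free function config_yaml_utils::DumpYamlConfig with calls through a CortexConfigMgr singleton. The class itself is not part of this diff; the following is only a sketch of the interface implied by the call sites and tests in this PR (a GetInstance() accessor, a DumpYamlConfig whose result supports has_error()/error(), and a FromYaml that returns a CortexConfig and throws std::runtime_error when the file is missing). The exact declaration in utils/config_yaml_utils.h may differ, and the cpp::result payload type is an assumption.

// Sketch only: the shape of the CortexConfigMgr singleton implied by the call
// sites in this PR; the real class in utils/config_yaml_utils.h may differ
// (a likely motivation for a singleton here is serializing concurrent writes
// to cortex's config file). CortexConfig and cpp::result come from the repo's
// existing headers.
namespace config_yaml_utils {

class CortexConfigMgr {
 public:
  CortexConfigMgr(const CortexConfigMgr&) = delete;
  CortexConfigMgr& operator=(const CortexConfigMgr&) = delete;

  static CortexConfigMgr& GetInstance() {
    static CortexConfigMgr instance;  // Meyers singleton, initialized once
    return instance;
  }

  // Writes config to path; callers above check result.has_error()/error().
  cpp::result<void, std::string> DumpYamlConfig(const CortexConfig& config,
                                                const std::string& path);

  // Reads path, filling missing fields from default_config; the updated test
  // FromYaml_FileNotFound expects std::runtime_error when the file is absent.
  CortexConfig FromYaml(const std::string& path,
                        const CortexConfig& default_config);

 private:
  CortexConfigMgr() = default;
};

}  // namespace config_yaml_utils
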
engine/cli/commands/cortex_upd_cmd.cc: 5 changes (3 additions, 2 deletions)

@@ -192,8 +192,9 @@ std::optional<std::string> CheckNewUpdate(
 CTL_INF("Got the latest release, update to the config file: "
 << latest_version)
 config.latestRelease = latest_version;
-auto result = config_yaml_utils::DumpYamlConfig(
-config, file_manager_utils::GetConfigurationPath().string());
+auto result =
+config_yaml_utils::CortexConfigMgr::GetInstance().DumpYamlConfig(
+config, file_manager_utils::GetConfigurationPath().string());
 if (result.has_error()) {
 CTL_ERR("Error update "
 << file_manager_utils::GetConfigurationPath().string()

engine/cli/main.cc: 5 changes (3 additions, 2 deletions)

@@ -151,8 +151,9 @@ int main(int argc, char* argv[]) {
 .count();
 config.latestLlamacppRelease = res.value();

-auto upd_config_res = config_yaml_utils::DumpYamlConfig(
-config, file_manager_utils::GetConfigurationPath().string());
+auto upd_config_res =
+config_yaml_utils::CortexConfigMgr::GetInstance().DumpYamlConfig(
+config, file_manager_utils::GetConfigurationPath().string());
 if (upd_config_res.has_error()) {
 CTL_ERR("Failed to update config file: " << upd_config_res.error());
 } else {

engine/controllers/models.cc: 10 changes (9 additions, 1 deletion)

@@ -442,6 +442,14 @@ void Models::StartModel(
 // model_path has higher priority
 if (auto& o = (*(req->getJsonObject()))["llama_model_path"]; !o.isNull()) {
 params_override.model_path = o.asString();
+if (auto& mp = (*(req->getJsonObject()))["model_path"]; mp.isNull()) {
+// Bypass if model does not exist in DB and llama_model_path exists
+if (std::filesystem::exists(params_override.model_path.value()) &&
+!model_service_->HasModel(model_handle)) {
+CTL_INF("llama_model_path exists, bypass check model id");
+params_override.bypass_llama_model_path = true;
+}
+}
 }

 if (auto& o = (*(req->getJsonObject()))["model_path"]; !o.isNull()) {

@@ -489,7 +497,7 @@
 auto& v = result.value();
 Json::Value ret;
 ret["message"] = "Started successfully!";
-if(v.warning) {
+if (v.warning) {
 ret["warning"] = *(v.warning);
 }
 auto resp = cortex_utils::CreateCortexHttpJsonResponse(ret);

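The net effect of the first models.cc hunk: a start request that passes llama_model_path without model_path can launch a GGUF file straight from disk even when the model was never registered in cortex's model database. A small stand-alone illustration of that condition, using the jsoncpp types the controller already uses; the helper name and file path are made up for the example:

#include <filesystem>
#include <json/json.h>  // jsoncpp, the JSON library already used by the controller

// Hypothetical helper mirroring the new check in Models::StartModel:
// bypass the model-id lookup only when llama_model_path is present,
// model_path is absent, the file exists, and the DB has no such model.
bool ShouldBypassModelCheck(const Json::Value& body, bool model_in_db) {
  const Json::Value& llama_path = body["llama_model_path"];
  const Json::Value& model_path = body["model_path"];
  if (llama_path.isNull() || !model_path.isNull())
    return false;
  return std::filesystem::exists(llama_path.asString()) && !model_in_db;
}

int main() {
  Json::Value body;
  body["llama_model_path"] = "/models/llama-3.2-1b.gguf";  // made-up path
  // No "model_path" key, so the new branch is taken.
  bool bypass = ShouldBypassModelCheck(body, /*model_in_db=*/false);
  // bypass is true only if the file above actually exists on disk.
  return bypass ? 0 : 1;
}
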
engine/main.cc: 3 changes (2 additions, 1 deletion)

@@ -51,7 +51,8 @@ void RunServer(std::optional<int> port, bool ignore_cout) {
 auto config_path = file_manager_utils::GetConfigurationPath();
 config.apiServerPort = std::to_string(*port);
 auto result =
-config_yaml_utils::DumpYamlConfig(config, config_path.string());
+config_yaml_utils::CortexConfigMgr::GetInstance().DumpYamlConfig(
+config, config_path.string());
 if (result.has_error()) {
 CTL_ERR("Error update " << config_path.string() << result.error());
 }

engine/services/engine_service.cc: 2 changes (1 addition, 1 deletion)

@@ -899,7 +899,7 @@ cpp::result<void, std::string> EngineService::LoadEngine(
 CTL_WRN("Method SetFileLogger is not supported yet");
 }
 if (en->IsSupported("SetLogLevel")) {
-en->SetLogLevel(trantor::Logger::logLevel());
+en->SetLogLevel(logging_utils_helper::global_log_level);
 } else {
 CTL_WRN("Method SetLogLevel is not supported yet");
 }

engine/services/hardware_service.cc: 5 changes (4 additions, 1 deletion)

@@ -57,6 +57,7 @@ HardwareInfo HardwareService::GetHardwareInfo() {
 }

 bool HardwareService::Restart(const std::string& host, int port) {
+namespace luh = logging_utils_helper;
 if (!ahc_)
 return true;
 auto exe = commands::GetCortexServerBinary();

@@ -117,6 +118,7 @@ bool HardwareService::Restart(const std::string& host, int port) {
 std::string params = "--ignore_cout";
 params += " --config_file_path " + get_config_file_path();
 params += " --data_folder_path " + get_data_folder_path();
+params += " --loglevel " + luh::LogLevelStr(luh::global_log_level);
 std::string cmds = cortex_utils::GetCurrentPath() + "/" + exe + " " + params;
 // Create child process
 if (!CreateProcess(

@@ -168,7 +170,8 @@ bool HardwareService::Restart(const std::string& host, int port) {
 std::string p = cortex_utils::GetCurrentPath() + "/" + exe;
 execl(p.c_str(), exe.c_str(), "--ignore_cout", "--config_file_path",
 get_config_file_path().c_str(), "--data_folder_path",
-get_data_folder_path().c_str(), "--loglevel", "INFO", (char*)0);
+get_data_folder_path().c_str(), "--loglevel",
+luh::LogLevelStr(luh::global_log_level).c_str(), (char*)0);
 } else {
 // Parent process
 if (!TryConnectToServer(host, port)) {

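Both the engine_service.cc and hardware_service.cc hunks read a shared logging_utils_helper::global_log_level instead of a hard-coded value, and hardware_service.cc now forwards it via --loglevel when the server re-spawns itself, so a restart keeps the level the user originally asked for. The helper header is not in this diff; the sketch below is only a guess at its shape from these call sites (the names global_log_level and LogLevelStr come from the diff, the trantor-based type and the string mapping are assumptions):

// Hypothetical shape of logging_utils_helper, inferred from the call sites in
// this PR; the real header in the repo may differ.
#pragma once

#include <string>

#include <trantor/utils/Logger.h>  // trantor ships with drogon, already a dependency

namespace logging_utils_helper {

// Process-wide log level, set once from the --loglevel CLI flag.
inline trantor::Logger::LogLevel global_log_level = trantor::Logger::kInfo;

// Converts the level back to the string form accepted by --loglevel.
inline std::string LogLevelStr(trantor::Logger::LogLevel level) {
  switch (level) {
    case trantor::Logger::kTrace: return "TRACE";
    case trantor::Logger::kDebug: return "DEBUG";
    case trantor::Logger::kInfo:  return "INFO";
    case trantor::Logger::kWarn:  return "WARN";
    case trantor::Logger::kError: return "ERROR";
    default:                      return "INFO";
  }
}

}  // namespace logging_utils_helper
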
engine/services/model_service.cc: 7 changes (6 additions, 1 deletion)

@@ -381,6 +381,10 @@ cpp::result<std::string, std::string> ModelService::HandleUrl(
 return unique_model_id;
 }

+bool ModelService::HasModel(const std::string& id) const {
+return cortex::db::Models().HasModel(id);
+}
+
 cpp::result<DownloadTask, std::string>
 ModelService::DownloadModelFromCortexsoAsync(
 const std::string& name, const std::string& branch,

@@ -745,7 +749,8 @@ cpp::result<StartModelResult, std::string> ModelService::StartModel(
 return cpp::fail(
 "Not enough VRAM - required: " + std::to_string(vram_needed_MiB) +
 " MiB, available: " + std::to_string(free_vram_MiB) +
-" MiB - Should adjust ngl to " + std::to_string(free_vram_MiB / (vram_needed_MiB / ngl) - 1));
+" MiB - Should adjust ngl to " +
+std::to_string(free_vram_MiB / (vram_needed_MiB / ngl) - 1));
 }

 if (ram_needed_MiB > free_ram_MiB) {

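The reflowed "Not enough VRAM" message in model_service.cc embeds a suggested ngl (number of GPU-offloaded layers): required VRAM divided by the requested ngl gives an approximate per-layer cost, free VRAM divided by that cost gives how many layers fit, and one is subtracted as headroom. A worked example with made-up numbers:

#include <cstdint>
#include <iostream>

int main() {
  // Hypothetical figures, only to illustrate the formula used in StartModel.
  int64_t vram_needed_MiB = 6600;  // VRAM needed to offload all layers
  int64_t free_vram_MiB = 3000;    // VRAM actually available
  int ngl = 33;                    // layers the user asked to offload

  // free / (needed / ngl) - 1  =>  3000 / (6600 / 33) - 1 = 3000 / 200 - 1 = 14
  auto suggested_ngl = free_vram_MiB / (vram_needed_MiB / ngl) - 1;

  std::cout << "Not enough VRAM - required: " << vram_needed_MiB
            << " MiB, available: " << free_vram_MiB
            << " MiB - Should adjust ngl to " << suggested_ngl << '\n';
  return 0;
}
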
engine/services/model_service.h: 13 changes (9 additions, 4 deletions)

@@ -3,10 +3,10 @@
 #include <memory>
 #include <optional>
 #include <string>
+#include "common/engine_servicei.h"
 #include "config/model_config.h"
 #include "services/download_service.h"
 #include "services/inference_service.h"
-#include "common/engine_servicei.h"

 struct ModelPullInfo {
 std::string id;

@@ -26,12 +26,15 @@ struct StartParameterOverride {
 std::optional<std::string> cache_type;
 std::optional<std::string> mmproj;
 std::optional<std::string> model_path;
-bool bypass_model_check() const { return mmproj.has_value(); }
+bool bypass_llama_model_path = false;
+bool bypass_model_check() const {
+return mmproj.has_value() || bypass_llama_model_path;
+}
 };

 struct StartModelResult {
-bool success;
-std::optional<std::string> warning;
+bool success;
+std::optional<std::string> warning;
 };

 class ModelService {

@@ -89,6 +92,8 @@ class ModelService {
 const std::string& url, std::optional<std::string> temp_model_id,
 std::optional<std::string> temp_name);

+bool HasModel(const std::string& id) const;
+
 private:
 /**
 * Handle downloading model which have following pattern: author/model_name

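Taken together with the controller change above, this header now lets a start request skip the model-DB check in two ways: a vision model that carries an mmproj file, or the new bypass_llama_model_path flag. A minimal, self-contained illustration (only the fields shown in this hunk are reproduced; the values are made up):

// Illustration of the new bypass_model_check() behaviour; this copy of the
// struct reproduces only the fields visible in the hunk above.
#include <cassert>
#include <optional>
#include <string>

struct StartParameterOverride {
  std::optional<std::string> cache_type;
  std::optional<std::string> mmproj;
  std::optional<std::string> model_path;
  bool bypass_llama_model_path = false;
  bool bypass_model_check() const {
    return mmproj.has_value() || bypass_llama_model_path;
  }
};

int main() {
  StartParameterOverride params;
  assert(!params.bypass_model_check());   // nothing set: the DB check still applies

  params.bypass_llama_model_path = true;  // set when llama_model_path is used
  assert(params.bypass_model_check());    // the model-id lookup is skipped
  return 0;
}
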
engine/test/components/test_cortex_config.cc: 18 changes (13 additions, 5 deletions)

@@ -2,6 +2,7 @@
 #include "utils/config_yaml_utils.h"

 namespace config_yaml_utils {
+namespace cyu = config_yaml_utils;
 class CortexConfigTest : public ::testing::Test {
 protected:
 const std::string test_file_path = "test_config.yaml";

@@ -43,7 +44,8 @@ TEST_F(CortexConfigTest, DumpYamlConfig_WritesCorrectly) {
 123456789,
 "v1.0.0"};

-auto result = DumpYamlConfig(config, test_file_path);
+auto result = cyu::CortexConfigMgr::GetInstance().DumpYamlConfig(
+config, test_file_path);
 EXPECT_FALSE(result.has_error());

 // Verify that the file was created and contains the expected data

@@ -72,11 +74,13 @@ TEST_F(CortexConfigTest, FromYaml_ReadsCorrectly) {
 123456789,
 "v1.0.0"};

-auto result = DumpYamlConfig(config, test_file_path);
+auto result = cyu::CortexConfigMgr::GetInstance().DumpYamlConfig(
+config, test_file_path);
 EXPECT_FALSE(result.has_error());

 // Now read from the YAML file
-CortexConfig loaded_config = FromYaml(test_file_path, default_config);
+CortexConfig loaded_config = cyu::CortexConfigMgr::GetInstance().FromYaml(
+test_file_path, default_config);

 // Verify that the loaded configuration matches what was written
 EXPECT_EQ(loaded_config.logFolderPath, config.logFolderPath);

@@ -92,7 +96,10 @@ TEST_F(CortexConfigTest, FromYaml_FileNotFound) {
 std::filesystem::remove(test_file_path); // Ensure the file does not exist

 EXPECT_THROW(
-{ FromYaml(test_file_path, default_config); },
+{
+cyu::CortexConfigMgr::GetInstance().FromYaml(test_file_path,
+default_config);
+},
 std::runtime_error); // Expect a runtime error due to missing file
 }

@@ -102,7 +109,8 @@ TEST_F(CortexConfigTest, FromYaml_IncompleteConfigUsesDefaults) {
 out_file << "logFolderPath: log_path\n"; // Missing other fields
 out_file.close();

-CortexConfig loaded_config = FromYaml(test_file_path, default_config);
+CortexConfig loaded_config = cyu::CortexConfigMgr::GetInstance().FromYaml(
+test_file_path, default_config);

 // Verify that defaults are used where values are missing
 EXPECT_EQ(loaded_config.logFolderPath, "log_path");

engine/test/components/test_file_manager_config_yaml_utils.cc: 7 changes (5 additions, 2 deletions)

@@ -63,7 +63,9 @@ TEST_F(FileManagerConfigTest, DumpYamlConfig) {
 .apiServerPort = "8080"};

 std::string test_file = "test_config.yaml";
-auto result = config_yaml_utils::DumpYamlConfig(config, test_file);
+auto result =
+config_yaml_utils::CortexConfigMgr::GetInstance().DumpYamlConfig(
+config, test_file);
 EXPECT_FALSE(result.has_error());
 EXPECT_TRUE(std::filesystem::exists(test_file));

@@ -83,7 +85,8 @@ TEST_F(FileManagerConfigTest, FromYaml) {
 out_file.close();

 config_yaml_utils::CortexConfig default_config{};
-auto config = config_yaml_utils::FromYaml(test_file, default_config);
+auto config = config_yaml_utils::CortexConfigMgr::GetInstance().FromYaml(
+test_file, default_config);

 EXPECT_EQ(config.logFolderPath, "/path/to/logs");
 EXPECT_EQ(config.dataFolderPath, "/path/to/data");