Skip to content

Commit

Permalink
Merge branch 'feat/python-engine' of github.com:janhq/cortex.cpp into feat/python-engine
Browse files Browse the repository at this point in the history
  • Loading branch information
nguyenhoangthuan99 committed Dec 30, 2024
2 parents 035f2d5 + 084c27c commit dc10a21
Show file tree
Hide file tree
Showing 61 changed files with 3,705 additions and 427 deletions.
542 changes: 497 additions & 45 deletions docs/static/openapi/cortex.json

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion engine/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,7 @@ target_link_libraries(${TARGET_NAME} PRIVATE JsonCpp::JsonCpp Drogon::Drogon Ope
target_link_libraries(${TARGET_NAME} PRIVATE SQLiteCpp)
target_link_libraries(${TARGET_NAME} PRIVATE eventpp::eventpp)
target_link_libraries(${TARGET_NAME} PRIVATE lfreist-hwinfo::hwinfo)

# ##############################################################################

if(CMAKE_CXX_STANDARD LESS 17)
Expand Down
1 change: 1 addition & 0 deletions engine/cli/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,7 @@ add_executable(${TARGET_NAME} main.cc
${CMAKE_CURRENT_SOURCE_DIR}/../services/model_service.cc
${CMAKE_CURRENT_SOURCE_DIR}/../services/inference_service.cc
${CMAKE_CURRENT_SOURCE_DIR}/../services/hardware_service.cc
${CMAKE_CURRENT_SOURCE_DIR}/../services/database_service.cc
${CMAKE_CURRENT_SOURCE_DIR}/../extensions/remote-engine/remote_engine.cc

${CMAKE_CURRENT_SOURCE_DIR}/../extensions/python-engine/python_engine.cc
Expand Down
14 changes: 8 additions & 6 deletions engine/cli/command_line_parser.cc
Original file line number Diff line number Diff line change
Expand Up @@ -49,8 +49,9 @@ CommandLineParser::CommandLineParser()
: app_("\nCortex.cpp CLI\n"),
download_service_{std::make_shared<DownloadService>()},
dylib_path_manager_{std::make_shared<cortex::DylibPathManager>()},
engine_service_{std::make_shared<EngineService>(download_service_,
dylib_path_manager_)} {
db_service_{std::make_shared<DatabaseService>()},
engine_service_{std::make_shared<EngineService>(
download_service_, dylib_path_manager_, db_service_)} {
supported_engines_ = engine_service_->GetSupportedEngineNames().value();
}

Expand Down Expand Up @@ -177,7 +178,7 @@ void CommandLineParser::SetupCommonCommands() {
return;
commands::RunCmd rc(cml_data_.config.apiServerHost,
std::stoi(cml_data_.config.apiServerPort),
cml_data_.model_id, engine_service_);
cml_data_.model_id, db_service_, engine_service_);
rc.Exec(cml_data_.run_detach, run_settings_);
});
}
Expand Down Expand Up @@ -216,9 +217,10 @@ void CommandLineParser::SetupModelCommands() {
CLI_LOG(model_start_cmd->help());
return;
};
commands::ModelStartCmd().Exec(cml_data_.config.apiServerHost,
std::stoi(cml_data_.config.apiServerPort),
cml_data_.model_id, run_settings_);
commands::ModelStartCmd(db_service_)
.Exec(cml_data_.config.apiServerHost,
std::stoi(cml_data_.config.apiServerPort), cml_data_.model_id,
run_settings_);
});

auto stop_model_cmd =
Expand Down
1 change: 1 addition & 0 deletions engine/cli/command_line_parser.h
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ class CommandLineParser {
CLI::App app_;
std::shared_ptr<DownloadService> download_service_;
std::shared_ptr<cortex::DylibPathManager> dylib_path_manager_;
std::shared_ptr<DatabaseService> db_service_;
std::shared_ptr<EngineService> engine_service_;
std::vector<std::string> supported_engines_;

Expand Down
3 changes: 1 addition & 2 deletions engine/cli/commands/chat_completion_cmd.cc
Original file line number Diff line number Diff line change
Expand Up @@ -56,10 +56,9 @@ void ChatCompletionCmd::Exec(const std::string& host, int port,
const std::string& model_handle, std::string msg) {
namespace fs = std::filesystem;
namespace fmu = file_manager_utils;
cortex::db::Models modellist_handler;
config::YamlHandler yaml_handler;
try {
auto model_entry = modellist_handler.GetModelInfo(model_handle);
auto model_entry = db_service_->GetModelInfo(model_handle);
if (model_entry.has_error()) {
CLI_LOG("Error: " + model_entry.error());
return;
Expand Down
4 changes: 4 additions & 0 deletions engine/cli/commands/chat_completion_cmd.h
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,20 @@
#include <string>
#include <vector>
#include "config/model_config.h"
#include "services/database_service.h"

namespace commands {
/// CLI command that runs a chat-completion session against a running server,
/// resolving model metadata through the injected database service.
class ChatCompletionCmd {
 public:
  /// @param db_service sink parameter; moved into the member to avoid an
  ///        extra atomic refcount increment on the shared_ptr copy.
  explicit ChatCompletionCmd(std::shared_ptr<DatabaseService> db_service)
      : db_service_(std::move(db_service)) {}

  /// Looks up `model_handle` via db_service_ (see GetModelInfo in the .cc)
  /// and starts a chat session with message `msg` against host:port.
  void Exec(const std::string& host, int port, const std::string& model_handle,
            std::string msg);

  /// Same as above but with an already-resolved model config `mc`,
  /// skipping the database lookup.
  void Exec(const std::string& host, int port, const std::string& model_handle,
            const config::ModelConfig& mc, std::string msg);

 private:
  std::shared_ptr<DatabaseService> db_service_;
  // Accumulated conversation turns for the current session.
  std::vector<Json::Value> histories_;
};
}  // namespace commands
2 changes: 1 addition & 1 deletion engine/cli/commands/model_start_cmd.cc
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ bool ModelStartCmd::Exec(
const std::unordered_map<std::string, std::string>& options,
bool print_success_log) {
std::optional<std::string> model_id =
SelectLocalModel(host, port, model_handle);
SelectLocalModel(host, port, model_handle, *db_service_);

if (!model_id.has_value()) {
return false;
Expand Down
9 changes: 8 additions & 1 deletion engine/cli/commands/model_start_cmd.h
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,23 @@
#include <string>
#include <unordered_map>
#include "json/json.h"
#include "services/database_service.h"

namespace commands {

/// CLI command that asks a running server at host:port to start a model,
/// using the injected database service to resolve the model handle.
class ModelStartCmd {
 public:
  /// @param db_service sink parameter; moved into the member to avoid an
  ///        extra atomic refcount increment on the shared_ptr copy.
  explicit ModelStartCmd(std::shared_ptr<DatabaseService> db_service)
      : db_service_(std::move(db_service)) {}

  /// Starts `model_handle` on the server.
  /// @param options extra start settings forwarded with the request.
  /// @param print_success_log when false, suppresses the success message
  ///        (used when invoked from `run` — see run_cmd.cc).
  /// @return true on success.
  bool Exec(const std::string& host, int port, const std::string& model_handle,
            const std::unordered_map<std::string, std::string>& options,
            bool print_success_log = true);

 private:
  // NOTE(review): presumably sets `key` to `value` in the JSON request
  // payload `data` — confirm against the implementation in the .cc file.
  bool UpdateConfig(Json::Value& data, const std::string& key,
                    const std::string& value);

  std::shared_ptr<DatabaseService> db_service_;
};
}  // namespace commands
18 changes: 8 additions & 10 deletions engine/cli/commands/run_cmd.cc
Original file line number Diff line number Diff line change
Expand Up @@ -14,12 +14,11 @@
namespace commands {

std::optional<std::string> SelectLocalModel(std::string host, int port,
const std::string& model_handle) {
const std::string& model_handle,
DatabaseService& db_service) {
std::optional<std::string> model_id = model_handle;
cortex::db::Models modellist_handler;

if (model_handle.empty()) {
auto all_local_models = modellist_handler.LoadModelList();
auto all_local_models = db_service.LoadModelList();
if (all_local_models.has_error() || all_local_models.value().empty()) {
CLI_LOG("No local models available!");
return std::nullopt;
Expand All @@ -42,7 +41,7 @@ std::optional<std::string> SelectLocalModel(std::string host, int port,
CLI_LOG("Selected: " << selection.value());
}
} else {
auto related_models_ids = modellist_handler.FindRelatedModel(model_handle);
auto related_models_ids = db_service.FindRelatedModel(model_handle);
if (related_models_ids.has_error() || related_models_ids.value().empty()) {
auto result = ModelPullCmd().Exec(host, port, model_handle);
if (!result) {
Expand All @@ -69,19 +68,18 @@ std::optional<std::string> SelectLocalModel(std::string host, int port,
void RunCmd::Exec(bool run_detach,
const std::unordered_map<std::string, std::string>& options) {
std::optional<std::string> model_id =
SelectLocalModel(host_, port_, model_handle_);
SelectLocalModel(host_, port_, model_handle_, *db_service_);
if (!model_id.has_value()) {
return;
}

cortex::db::Models modellist_handler;
config::YamlHandler yaml_handler;
auto address = host_ + ":" + std::to_string(port_);

try {
namespace fs = std::filesystem;
namespace fmu = file_manager_utils;
auto model_entry = modellist_handler.GetModelInfo(*model_id);
auto model_entry = db_service_->GetModelInfo(*model_id);
if (model_entry.has_error()) {
CLI_LOG("Error: " + model_entry.error());
return;
Expand Down Expand Up @@ -128,7 +126,7 @@ void RunCmd::Exec(bool run_detach,
mc.engine.find(kLlamaEngine) == std::string::npos) ||
!commands::ModelStatusCmd().IsLoaded(host_, port_, *model_id)) {

auto res = commands::ModelStartCmd()
auto res = commands::ModelStartCmd(db_service_)
.Exec(host_, port_, *model_id, options,
false /*print_success_log*/);
if (!res) {
Expand All @@ -144,7 +142,7 @@ void RunCmd::Exec(bool run_detach,
<< commands::GetCortexBinary() << " run " << *model_id
<< "` for interactive chat shell");
} else {
ChatCompletionCmd().Exec(host_, port_, *model_id, mc, "");
ChatCompletionCmd(db_service_).Exec(host_, port_, *model_id, mc, "");
}
}
} catch (const std::exception& e) {
Expand Down
7 changes: 6 additions & 1 deletion engine/cli/commands/run_cmd.h
Original file line number Diff line number Diff line change
Expand Up @@ -2,20 +2,24 @@

#include <string>
#include <unordered_map>
#include "services/database_service.h"
#include "services/engine_service.h"

namespace commands {

std::optional<std::string> SelectLocalModel(std::string host, int port,
const std::string& model_handle);
const std::string& model_handle,
DatabaseService& db_service);

class RunCmd {
public:
explicit RunCmd(std::string host, int port, std::string model_handle,
std::shared_ptr<DatabaseService> db_service,
std::shared_ptr<EngineService> engine_service)
: host_{std::move(host)},
port_{port},
model_handle_{std::move(model_handle)},
db_service_(db_service),
engine_service_{engine_service} {};

void Exec(bool chat_flag,
Expand All @@ -25,6 +29,7 @@ class RunCmd {
std::string host_;
int port_;
std::string model_handle_;
std::shared_ptr<DatabaseService> db_service_;
std::shared_ptr<EngineService> engine_service_;
};
} // namespace commands
3 changes: 2 additions & 1 deletion engine/cli/commands/server_start_cmd.cc
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,8 @@ bool ServerStartCmd::Exec(const std::string& host, int port,
// Some engines requires to add lib search path before process being created
auto download_srv = std::make_shared<DownloadService>();
auto dylib_path_mng = std::make_shared<cortex::DylibPathManager>();
EngineService(download_srv, dylib_path_mng).RegisterEngineLibPath();
auto db_srv = std::make_shared<DatabaseService>();
EngineService(download_srv, dylib_path_mng, db_srv).RegisterEngineLibPath();

std::string p = cortex_utils::GetCurrentPath() + "/" + exe;
execl(p.c_str(), exe.c_str(), "--start-server", "--config_file_path",
Expand Down
Loading

0 comments on commit dc10a21

Please sign in to comment.