Skip to content

Commit

Permalink
chore: db: models to service
Browse files Browse the repository at this point in the history
  • Loading branch information
sangjanai committed Dec 29, 2024
1 parent 8056f9c commit 55739c3
Show file tree
Hide file tree
Showing 16 changed files with 204 additions and 136 deletions.
9 changes: 5 additions & 4 deletions engine/cli/command_line_parser.cc
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@ void CommandLineParser::SetupCommonCommands() {
return;
commands::RunCmd rc(cml_data_.config.apiServerHost,
std::stoi(cml_data_.config.apiServerPort),
cml_data_.model_id, engine_service_);
cml_data_.model_id, db_service_, engine_service_);
rc.Exec(cml_data_.run_detach, run_settings_);
});
}
Expand Down Expand Up @@ -217,9 +217,10 @@ void CommandLineParser::SetupModelCommands() {
CLI_LOG(model_start_cmd->help());
return;
};
commands::ModelStartCmd().Exec(cml_data_.config.apiServerHost,
std::stoi(cml_data_.config.apiServerPort),
cml_data_.model_id, run_settings_);
commands::ModelStartCmd(db_service_)
.Exec(cml_data_.config.apiServerHost,
std::stoi(cml_data_.config.apiServerPort), cml_data_.model_id,
run_settings_);
});

auto stop_model_cmd =
Expand Down
3 changes: 1 addition & 2 deletions engine/cli/commands/chat_completion_cmd.cc
Original file line number Diff line number Diff line change
Expand Up @@ -56,10 +56,9 @@ void ChatCompletionCmd::Exec(const std::string& host, int port,
const std::string& model_handle, std::string msg) {
namespace fs = std::filesystem;
namespace fmu = file_manager_utils;
cortex::db::Models modellist_handler;
config::YamlHandler yaml_handler;
try {
auto model_entry = modellist_handler.GetModelInfo(model_handle);
auto model_entry = db_service_->GetModelInfo(model_handle);
if (model_entry.has_error()) {
CLI_LOG("Error: " + model_entry.error());
return;
Expand Down
4 changes: 4 additions & 0 deletions engine/cli/commands/chat_completion_cmd.h
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,20 @@
#include <string>
#include <vector>
#include "config/model_config.h"
#include "services/database_service.h"

namespace commands {
// CLI command that runs a chat-completion session against a model served
// on host:port. Model metadata is resolved through the injected
// DatabaseService (see chat_completion_cmd.cc: GetModelInfo).
class ChatCompletionCmd {
public:
// Takes shared ownership of the database service used for model lookups.
explicit ChatCompletionCmd(std::shared_ptr<DatabaseService> db_service)
: db_service_(db_service) {}
// Looks up the model entry for `model_handle` via the database service,
// then starts the chat with initial message `msg` (may be empty).
void Exec(const std::string& host, int port, const std::string& model_handle,
std::string msg);
// Overload taking an already-loaded model config `mc`; presumably skips
// the DB lookup -- TODO(review): confirm against chat_completion_cmd.cc.
void Exec(const std::string& host, int port, const std::string& model_handle,
const config::ModelConfig& mc, std::string msg);

private:
std::shared_ptr<DatabaseService> db_service_;
// Accumulated conversation turns for the current interactive session.
std::vector<Json::Value> histories_;
};
} // namespace commands
2 changes: 1 addition & 1 deletion engine/cli/commands/model_start_cmd.cc
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ bool ModelStartCmd::Exec(
const std::unordered_map<std::string, std::string>& options,
bool print_success_log) {
std::optional<std::string> model_id =
SelectLocalModel(host, port, model_handle);
SelectLocalModel(host, port, model_handle, *db_service_);

if (!model_id.has_value()) {
return false;
Expand Down
9 changes: 8 additions & 1 deletion engine/cli/commands/model_start_cmd.h
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,23 @@
#include <string>
#include <unordered_map>
#include "json/json.h"
#include "services/database_service.h"

namespace commands {

// CLI command that starts (loads) a model on the API server at host:port.
// Model resolution goes through the injected DatabaseService (see
// model_start_cmd.cc: SelectLocalModel).
class ModelStartCmd {
 public:
  // Takes shared ownership of the database service used to resolve the
  // model handle to a concrete local model.
  explicit ModelStartCmd(std::shared_ptr<DatabaseService> db_service)
      : db_service_(db_service) {}

  // Starts `model_handle` with the given start `options`; an empty or
  // ambiguous handle triggers interactive selection. Returns true on
  // success. Pass print_success_log=false to suppress the success message
  // (used by RunCmd).
  bool Exec(const std::string& host, int port, const std::string& model_handle,
            const std::unordered_map<std::string, std::string>& options,
            bool print_success_log = true);

 private:
  // Presumably writes `value` under `key` into `data` -- TODO(review):
  // confirm exact semantics in model_start_cmd.cc.
  bool UpdateConfig(Json::Value& data, const std::string& key,
                    const std::string& value);

  std::shared_ptr<DatabaseService> db_service_;
};
}  // namespace commands
18 changes: 8 additions & 10 deletions engine/cli/commands/run_cmd.cc
Original file line number Diff line number Diff line change
Expand Up @@ -14,12 +14,11 @@
namespace commands {

std::optional<std::string> SelectLocalModel(std::string host, int port,
const std::string& model_handle) {
const std::string& model_handle,
DatabaseService& db_service) {
std::optional<std::string> model_id = model_handle;
cortex::db::Models modellist_handler;

if (model_handle.empty()) {
auto all_local_models = modellist_handler.LoadModelList();
auto all_local_models = db_service.LoadModelList();
if (all_local_models.has_error() || all_local_models.value().empty()) {
CLI_LOG("No local models available!");
return std::nullopt;
Expand All @@ -42,7 +41,7 @@ std::optional<std::string> SelectLocalModel(std::string host, int port,
CLI_LOG("Selected: " << selection.value());
}
} else {
auto related_models_ids = modellist_handler.FindRelatedModel(model_handle);
auto related_models_ids = db_service.FindRelatedModel(model_handle);
if (related_models_ids.has_error() || related_models_ids.value().empty()) {
auto result = ModelPullCmd().Exec(host, port, model_handle);
if (!result) {
Expand All @@ -69,19 +68,18 @@ std::optional<std::string> SelectLocalModel(std::string host, int port,
void RunCmd::Exec(bool run_detach,
const std::unordered_map<std::string, std::string>& options) {
std::optional<std::string> model_id =
SelectLocalModel(host_, port_, model_handle_);
SelectLocalModel(host_, port_, model_handle_, *db_service_);
if (!model_id.has_value()) {
return;
}

cortex::db::Models modellist_handler;
config::YamlHandler yaml_handler;
auto address = host_ + ":" + std::to_string(port_);

try {
namespace fs = std::filesystem;
namespace fmu = file_manager_utils;
auto model_entry = modellist_handler.GetModelInfo(*model_id);
auto model_entry = db_service_->GetModelInfo(*model_id);
if (model_entry.has_error()) {
CLI_LOG("Error: " + model_entry.error());
return;
Expand Down Expand Up @@ -128,7 +126,7 @@ void RunCmd::Exec(bool run_detach,
mc.engine.find(kLlamaEngine) == std::string::npos) ||
!commands::ModelStatusCmd().IsLoaded(host_, port_, *model_id)) {

auto res = commands::ModelStartCmd()
auto res = commands::ModelStartCmd(db_service_)
.Exec(host_, port_, *model_id, options,
false /*print_success_log*/);
if (!res) {
Expand All @@ -144,7 +142,7 @@ void RunCmd::Exec(bool run_detach,
<< commands::GetCortexBinary() << " run " << *model_id
<< "` for interactive chat shell");
} else {
ChatCompletionCmd().Exec(host_, port_, *model_id, mc, "");
ChatCompletionCmd(db_service_).Exec(host_, port_, *model_id, mc, "");
}
}
} catch (const std::exception& e) {
Expand Down
7 changes: 6 additions & 1 deletion engine/cli/commands/run_cmd.h
Original file line number Diff line number Diff line change
Expand Up @@ -2,20 +2,24 @@

#include <string>
#include <unordered_map>
#include "services/database_service.h"
#include "services/engine_service.h"

namespace commands {

std::optional<std::string> SelectLocalModel(std::string host, int port,
const std::string& model_handle);
const std::string& model_handle,
DatabaseService& db_service);

class RunCmd {
public:
explicit RunCmd(std::string host, int port, std::string model_handle,
std::shared_ptr<DatabaseService> db_service,
std::shared_ptr<EngineService> engine_service)
: host_{std::move(host)},
port_{port},
model_handle_{std::move(model_handle)},
db_service_(db_service),
engine_service_{engine_service} {};

void Exec(bool chat_flag,
Expand All @@ -25,6 +29,7 @@ class RunCmd {
std::string host_;
int port_;
std::string model_handle_;
std::shared_ptr<DatabaseService> db_service_;
std::shared_ptr<EngineService> engine_service_;
};
} // namespace commands
15 changes: 5 additions & 10 deletions engine/controllers/models.cc
Original file line number Diff line number Diff line change
Expand Up @@ -165,10 +165,9 @@ void Models::ListModel(
model_service_->ForceIndexingModelList();
// Iterate through directory

cortex::db::Models modellist_handler;
config::YamlHandler yaml_handler;

auto list_entry = modellist_handler.LoadModelList();
auto list_entry = db_service_->LoadModelList();
if (list_entry) {
for (const auto& model_entry : list_entry.value()) {
try {
Expand Down Expand Up @@ -256,9 +255,8 @@ void Models::GetModel(const HttpRequestPtr& req,
Json::Value ret;

try {
cortex::db::Models modellist_handler;
config::YamlHandler yaml_handler;
auto model_entry = modellist_handler.GetModelInfo(model_id);
auto model_entry = db_service_->GetModelInfo(model_id);
if (model_entry.has_error()) {
ret["id"] = model_id;
ret["object"] = "model";
Expand Down Expand Up @@ -337,8 +335,7 @@ void Models::UpdateModel(const HttpRequestPtr& req,
namespace fmu = file_manager_utils;
auto json_body = *(req->getJsonObject());
try {
cortex::db::Models model_list_utils;
auto model_entry = model_list_utils.GetModelInfo(model_id);
auto model_entry = db_service_->GetModelInfo(model_id);
config::YamlHandler yaml_handler;
auto yaml_fp = fmu::ToAbsoluteCortexDataPath(
fs::path(model_entry.value().path_to_model_yaml));
Expand Down Expand Up @@ -401,7 +398,6 @@ void Models::ImportModel(
auto option = (*(req->getJsonObject())).get("option", "symlink").asString();
config::GGUFHandler gguf_handler;
config::YamlHandler yaml_handler;
cortex::db::Models modellist_utils_obj;
std::string model_yaml_path = (file_manager_utils::GetModelsContainerPath() /
std::filesystem::path("imported") /
std::filesystem::path(modelHandle + ".yml"))
Expand Down Expand Up @@ -440,7 +436,7 @@ void Models::ImportModel(
model_config.name = modelName.empty() ? model_config.name : modelName;
yaml_handler.UpdateModelConfig(model_config);

if (modellist_utils_obj.AddModelEntry(model_entry).value()) {
if (db_service_->AddModelEntry(model_entry).value()) {
yaml_handler.WriteYamlFile(model_yaml_path);
std::string success_message = "Model is imported successfully!";
LOG_INFO << success_message;
Expand Down Expand Up @@ -667,7 +663,6 @@ void Models::AddRemoteModel(

config::RemoteModelConfig model_config;
model_config.LoadFromJson(*(req->getJsonObject()));
cortex::db::Models modellist_utils_obj;
std::string model_yaml_path = (file_manager_utils::GetModelsContainerPath() /
std::filesystem::path("remote") /
std::filesystem::path(model_handle + ".yml"))
Expand All @@ -683,7 +678,7 @@ void Models::AddRemoteModel(
"openai"};
std::filesystem::create_directories(
std::filesystem::path(model_yaml_path).parent_path());
if (modellist_utils_obj.AddModelEntry(model_entry).value()) {
if (db_service_->AddModelEntry(model_entry).value()) {
model_config.SaveToYamlFile(model_yaml_path);
std::string success_message = "Model is imported successfully!";
LOG_INFO << success_message;
Expand Down
7 changes: 5 additions & 2 deletions engine/controllers/models.h
Original file line number Diff line number Diff line change
Expand Up @@ -45,10 +45,12 @@ class Models : public drogon::HttpController<Models, false> {
ADD_METHOD_TO(Models::GetModelSources, "/v1/models/sources", Get);
METHOD_LIST_END

explicit Models(std::shared_ptr<ModelService> model_service,
explicit Models(std::shared_ptr<DatabaseService> db_service,
std::shared_ptr<ModelService> model_service,
std::shared_ptr<EngineService> engine_service,
std::shared_ptr<ModelSourceService> mss)
: model_service_{model_service},
: db_service_(db_service),
model_service_{model_service},
engine_service_{engine_service},
model_src_svc_(mss) {}

Expand Down Expand Up @@ -105,6 +107,7 @@ class Models : public drogon::HttpController<Models, false> {
std::function<void(const HttpResponsePtr&)>&& callback);

private:
std::shared_ptr<DatabaseService> db_service_;
std::shared_ptr<ModelService> model_service_;
std::shared_ptr<EngineService> engine_service_;
std::shared_ptr<ModelSourceService> model_src_svc_;
Expand Down
8 changes: 4 additions & 4 deletions engine/main.cc
Original file line number Diff line number Diff line change
Expand Up @@ -162,9 +162,9 @@ void RunServer(std::optional<std::string> host, std::optional<int> port,
auto engine_service = std::make_shared<EngineService>(
download_service, dylib_path_manager, db_service);
auto inference_svc = std::make_shared<InferenceService>(engine_service);
auto model_src_svc = std::make_shared<ModelSourceService>();
auto model_src_svc = std::make_shared<ModelSourceService>(db_service);
auto model_service = std::make_shared<ModelService>(
hw_service, download_service, inference_svc, engine_service);
db_service, hw_service, download_service, inference_svc, engine_service);
inference_svc->SetModelService(model_service);

auto file_watcher_srv = std::make_shared<FileWatcherService>(
Expand All @@ -179,8 +179,8 @@ void RunServer(std::optional<std::string> host, std::optional<int> port,
auto thread_ctl = std::make_shared<Threads>(thread_srv, message_srv);
auto message_ctl = std::make_shared<Messages>(message_srv);
auto engine_ctl = std::make_shared<Engines>(engine_service);
auto model_ctl =
std::make_shared<Models>(model_service, engine_service, model_src_svc);
auto model_ctl = std::make_shared<Models>(db_service, model_service,
engine_service, model_src_svc);
auto event_ctl = std::make_shared<Events>(event_queue_ptr);
auto pm_ctl = std::make_shared<ProcessManager>();
auto hw_ctl = std::make_shared<Hardware>(engine_service, hw_service);
Expand Down
57 changes: 56 additions & 1 deletion engine/services/database_service.cc
Original file line number Diff line number Diff line change
Expand Up @@ -71,5 +71,60 @@ cpp::result<bool, std::string> DatabaseService::DeleteHardwareEntry(
const std::string& id) {
return cortex::db::Hardware().DeleteHardwareEntry(id);
}
// end hardware

// end hardware
// begin models
// Delegates to cortex::db::Models::LoadModelList().
cpp::result<std::vector<ModelEntry>, std::string>
DatabaseService::LoadModelList() const {
  // Short-lived table accessor; no state is kept between calls.
  cortex::db::Models model_db;
  return model_db.LoadModelList();
}

// Delegates to cortex::db::Models::GetModelInfo() for `identifier`.
cpp::result<ModelEntry, std::string> DatabaseService::GetModelInfo(
    const std::string& identifier) const {
  cortex::db::Models model_db;
  return model_db.GetModelInfo(identifier);
}

// Delegates to cortex::db::Models::AddModelEntry().
cpp::result<bool, std::string> DatabaseService::AddModelEntry(
    ModelEntry new_entry) {
  // NOTE(review): new_entry arrives by value and is copied again into the
  // accessor; presumably cheap, but std::move would save a copy -- confirm.
  cortex::db::Models model_db;
  return model_db.AddModelEntry(new_entry);
}

// Delegates to cortex::db::Models::UpdateModelEntry() for `identifier`.
cpp::result<bool, std::string> DatabaseService::UpdateModelEntry(
    const std::string& identifier, const ModelEntry& updated_entry) {
  cortex::db::Models model_db;
  return model_db.UpdateModelEntry(identifier, updated_entry);
}

// Delegates to cortex::db::Models::DeleteModelEntry() for `identifier`.
cpp::result<bool, std::string> DatabaseService::DeleteModelEntry(
    const std::string& identifier) {
  cortex::db::Models model_db;
  return model_db.DeleteModelEntry(identifier);
}

// Delegates to cortex::db::Models::DeleteModelEntryWithOrg() for `src`.
cpp::result<bool, std::string> DatabaseService::DeleteModelEntryWithOrg(
    const std::string& src) {
  cortex::db::Models model_db;
  return model_db.DeleteModelEntryWithOrg(src);
}

// Delegates to cortex::db::Models::DeleteModelEntryWithRepo() for `src`.
cpp::result<bool, std::string> DatabaseService::DeleteModelEntryWithRepo(
    const std::string& src) {
  cortex::db::Models model_db;
  return model_db.DeleteModelEntryWithRepo(src);
}

// Delegates to cortex::db::Models::FindRelatedModel() for `identifier`.
cpp::result<std::vector<std::string>, std::string>
DatabaseService::FindRelatedModel(const std::string& identifier) const {
  cortex::db::Models model_db;
  return model_db.FindRelatedModel(identifier);
}

bool DatabaseService::HasModel(const std::string& identifier) const {
return cortex::db::Models().HasModel(identifier);
}

// Delegates to cortex::db::Models::GetModelSources().
cpp::result<std::vector<std::string>, std::string>
DatabaseService::GetModelSources() const {
  cortex::db::Models model_db;
  return model_db.GetModelSources();
}

// Delegates to cortex::db::Models::GetModels() for source `model_src`.
cpp::result<std::vector<std::string>, std::string> DatabaseService::GetModels(
    const std::string& model_src) const {
  cortex::db::Models model_db;
  return model_db.GetModels(model_src);
}
// end models
Loading

0 comments on commit 55739c3

Please sign in to comment.