chore: refactor utils (#1760)
* chore: config_yaml_utils

* chore: file_manager_utils

* chore: curl_utils

* chore: system_info_utils

* chore: clean e2e tests

* fix: build macos

* fix: docker e2e tests

* fix: e2e docker

* fix: e2e tests

---------

Co-authored-by: vansangpfiev <[email protected]>
vansangpfiev and sangjanai authored Dec 3, 2024
1 parent d5231eb commit 5eda212
Showing 30 changed files with 1,198 additions and 1,232 deletions.
2 changes: 1 addition & 1 deletion docker/entrypoint.sh
@@ -7,10 +7,10 @@ echo "enableCors: true" >> /root/.cortexrc

# Install the engine
cortex engines install llama-cpp -s /opt/cortex.llamacpp
cortex engines list

# Start the cortex server
cortex start
cortex engines list

# Keep the container running by tailing the log files
tail -f /root/cortexcpp/logs/cortex.log &
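
Note on the reorder above: cortex engines list now runs only after cortex start, presumably because listing engines queries the API of the running server. A minimal Python sketch of the same wait-then-list pattern against the http://localhost:3928/engines endpoint used by the e2e tests below (the wait_for_server helper is illustrative, not part of this commit):

import time

import requests

def wait_for_server(base_url="http://localhost:3928", timeout=30.0):
    # Poll the engines endpoint until the server answers or the timeout expires.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            if requests.get(f"{base_url}/engines", timeout=2).status_code == 200:
                return True
        except requests.RequestException:
            pass  # server not up yet, keep polling
        time.sleep(0.5)
    return False

if wait_for_server():
    # Rough HTTP equivalent of `cortex engines list`
    print(requests.get("http://localhost:3928/engines").json())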
3 changes: 2 additions & 1 deletion engine/CMakeLists.txt
@@ -176,10 +176,11 @@ aux_source_directory(cortex-common CORTEX_COMMON)
aux_source_directory(config CONFIG_SRC)
aux_source_directory(database DB_SRC)
aux_source_directory(migrations MIGR_SRC)
aux_source_directory(utils UTILS_SRC)

target_include_directories(${TARGET_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} )

target_sources(${TARGET_NAME} PRIVATE ${CONFIG_SRC} ${CTL_SRC} ${COMMON_SRC} ${SERVICES_SRC} ${DB_SRC} ${MIGR_SRC})
target_sources(${TARGET_NAME} PRIVATE ${UTILS_SRC} ${CONFIG_SRC} ${CTL_SRC} ${COMMON_SRC} ${SERVICES_SRC} ${DB_SRC} ${MIGR_SRC})

set_target_properties(${TARGET_NAME} PROPERTIES
RUNTIME_OUTPUT_DIRECTORY_DEBUG ${CMAKE_BINARY_DIR}
4 changes: 4 additions & 0 deletions engine/cli/CMakeLists.txt
@@ -84,6 +84,10 @@ add_executable(${TARGET_NAME} main.cc
${CMAKE_CURRENT_SOURCE_DIR}/../services/hardware_service.cc
${CMAKE_CURRENT_SOURCE_DIR}/utils/easywsclient.cc
${CMAKE_CURRENT_SOURCE_DIR}/utils/download_progress.cc
${CMAKE_CURRENT_SOURCE_DIR}/../utils/config_yaml_utils.cc
${CMAKE_CURRENT_SOURCE_DIR}/../utils/file_manager_utils.cc
${CMAKE_CURRENT_SOURCE_DIR}/../utils/curl_utils.cc
${CMAKE_CURRENT_SOURCE_DIR}/../utils/system_info_utils.cc
)

target_link_libraries(${TARGET_NAME} PRIVATE CLI11::CLI11)
1 change: 1 addition & 0 deletions engine/cli/commands/engine_list_cmd.cc
@@ -4,6 +4,7 @@
#include "common/engine_servicei.h"
#include "server_start_cmd.h"
#include "utils/curl_utils.h"
#include "utils/engine_constants.h"
#include "utils/logging_utils.h"
#include "utils/url_parser.h"
// clang-format off
1 change: 1 addition & 0 deletions engine/cli/commands/ps_cmd.cc
@@ -2,6 +2,7 @@
#include <string>
#include <tabulate/table.hpp>
#include "utils/curl_utils.h"
#include "utils/engine_constants.h"
#include "utils/format_utils.h"
#include "utils/logging_utils.h"
#include "utils/string_utils.h"
5 changes: 5 additions & 0 deletions engine/cli/commands/server_start_cmd.cc
@@ -1,6 +1,7 @@
#include "server_start_cmd.h"
#include "commands/cortex_upd_cmd.h"
#include "utils/cortex_utils.h"
#include "utils/engine_constants.h"
#include "utils/file_manager_utils.h"
#include "utils/widechar_conv.h"

@@ -27,6 +28,10 @@ bool TryConnectToServer(const std::string& host, int port) {

bool ServerStartCmd::Exec(const std::string& host, int port,
const std::optional<std::string>& log_level) {
if (IsServerAlive(host, port)) {
CLI_LOG("The server has already started");
return true;
}
std::string log_level_;
if (!log_level.has_value()) {
log_level_ = "INFO";
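
The new IsServerAlive guard makes cortex start effectively idempotent: a second invocation logs "The server has already started" and returns success instead of spawning another server. A rough Python sketch of how that behaviour could be exercised from the shell (it assumes only that a cortex binary is on PATH; this is not one of the e2e tests in this PR):

import subprocess

# Starting twice should succeed both times; the second call is expected to
# hit the IsServerAlive early return rather than launch a duplicate server.
for attempt in (1, 2):
    result = subprocess.run(["cortex", "start"], capture_output=True, text=True)
    print(f"cortex start attempt {attempt}: exit code {result.returncode}")
    assert result.returncode == 0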
2 changes: 1 addition & 1 deletion engine/cli/main.cc
@@ -148,7 +148,6 @@ int main(int argc, char* argv[]) {

if (should_check_for_latest_llamacpp_version) {
std::thread t1([]() {
auto config = file_manager_utils::GetCortexConfig();
// TODO: namh current we only check for llamacpp. Need to add support for other engine
auto get_latest_version = []() -> cpp::result<std::string, std::string> {
try {
@@ -176,6 +175,7 @@ int main(int argc, char* argv[]) {

auto now = std::chrono::system_clock::now();
CTL_DBG("latest llama.cpp version: " << res.value());
auto config = file_manager_utils::GetCortexConfig();
config.checkedForLlamacppUpdateAt =
std::chrono::duration_cast<std::chrono::milliseconds>(
now.time_since_epoch())
16 changes: 3 additions & 13 deletions engine/e2e-test/main.py
@@ -3,26 +3,16 @@

### e2e tests are expensive, have to keep engines tests in order
from test_api_engine_list import TestApiEngineList
from test_api_engine_install import TestApiEngineInstall
from test_api_engine_get import TestApiEngineGet

### models, keeps in order, note that we only uninstall engine after finishing all models test
from test_api_model_pull_direct_url import TestApiModelPullDirectUrl
from test_api_model_start_stop import TestApiModelStartStop
from test_api_model_get import TestApiModelGet
from test_api_model_list import TestApiModelList
from test_api_model_update import TestApiModelUpdate
from test_api_model_delete import TestApiModelDelete
from test_api_engine import TestApiEngine
from test_api_model import TestApiModel
from test_api_model_import import TestApiModelImport
from test_api_engine_uninstall import TestApiEngineUninstall

###
from test_cli_engine_get import TestCliEngineGet
from test_cli_engine_install import TestCliEngineInstall
from test_cli_engine_list import TestCliEngineList
from test_cli_engine_uninstall import TestCliEngineUninstall
from test_cli_model_delete import TestCliModelDelete
from test_cli_model_pull_direct_url import TestCliModelPullDirectUrl
from test_cli_model import TestCliModel
from test_cli_server_start import TestCliServerStart
from test_cortex_update import TestCortexUpdate
from test_create_log_folder import TestCreateLogFolder
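
After the cleanup, main.py pulls in consolidated suites such as TestApiEngine and TestApiModel instead of one module per endpoint, and the comment at the top still relies on the import order to keep the expensive engine tests grouped. The real bottom of main.py is outside this hunk; a minimal sketch of the kind of entry point such an aggregator usually has (an assumption, not the file's actual code):

import sys

import pytest

if __name__ == "__main__":
    # Run every test class imported above in a single pytest session.
    sys.exit(pytest.main([__file__, "-v"]))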
engine/e2e-test/{test_api_engine_uninstall.py → test_api_engine.py}
@@ -1,29 +1,49 @@
import time

import pytest
import requests
import time
from test_runner import (
run,
start_server_if_needed,
start_server,
stop_server,
wait_for_websocket_download_success_event,
)


class TestApiEngineUninstall:
class TestApiEngine:

@pytest.fixture(autouse=True)
def setup_and_teardown(self):
# Setup
start_server_if_needed()
success = start_server()
if not success:
raise Exception("Failed to start server")

yield

# Teardown
stop_server()

# engines get
def test_engines_get_llamacpp_should_be_successful(self):
response = requests.get("http://localhost:3928/engines/llama-cpp")
assert response.status_code == 200

# engines install
def test_engines_install_llamacpp_specific_version_and_variant(self):
data = {"version": "v0.1.35-27.10.24", "variant": "linux-amd64-avx-cuda-11-7"}
response = requests.post(
"http://localhost:3928/v1/engines/llama-cpp/install", json=data
)
assert response.status_code == 200

def test_engines_install_llamacpp_specific_version_and_null_variant(self):
data = {"version": "v0.1.35-27.10.24"}
response = requests.post(
"http://localhost:3928/v1/engines/llama-cpp/install", json=data
)
assert response.status_code == 200

# engines uninstall
@pytest.mark.asyncio
async def test_engines_uninstall_llamacpp_should_be_successful(self):
async def test_engines_install_uninstall_llamacpp_should_be_successful(self):
response = requests.post("http://localhost:3928/v1/engines/llama-cpp/install")
assert response.status_code == 200
await wait_for_websocket_download_success_event(timeout=None)
@@ -33,7 +53,7 @@ async def test_engines_uninstall_llamacpp_should_be_successful(self):
assert response.status_code == 200

@pytest.mark.asyncio
async def test_engines_uninstall_llamacpp_with_only_version_should_be_failed(self):
async def test_engines_install_uninstall_llamacpp_with_only_version_should_be_failed(self):
# install first
data = {"variant": "mac-arm64"}
install_response = requests.post(
@@ -50,7 +70,7 @@ async def test_engines_uninstall_llamacpp_with_only_version_should_be_failed(self):
assert response.json()["message"] == "No variant provided"

@pytest.mark.asyncio
async def test_engines_uninstall_llamacpp_with_variant_should_be_successful(self):
async def test_engines_install_uninstall_llamacpp_with_variant_should_be_successful(self):
# install first
data = {"variant": "mac-arm64"}
install_response = requests.post(
@@ -62,7 +82,7 @@ async def test_engines_uninstall_llamacpp_with_variant_should_be_successful(self):
response = requests.delete("http://127.0.0.1:3928/v1/engines/llama-cpp/install")
assert response.status_code == 200

def test_engines_uninstall_llamacpp_with_specific_variant_and_version_should_be_successful(
def test_engines_install_uninstall_llamacpp_with_specific_variant_and_version_should_be_successful(
self,
):
data = {"variant": "mac-arm64", "version": "v0.1.35"}
@@ -76,3 +96,5 @@ def test_engines_uninstall_llamacpp_with_specific_variant_and_version_should_be_successful(
"http://localhost:3928/v1/engines/llama-cpp/install", json=data
)
assert response.status_code == 200
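
The consolidated TestApiEngine class now covers get, install, and uninstall behind one autouse fixture that starts and stops the server around every test. For reference, a hedged sketch of the same fixture pattern applied to a new suite; the class and test below are illustrative only and not part of this commit:

import pytest
import requests
from test_runner import start_server, stop_server

class TestApiEngineSmoke:

    @pytest.fixture(autouse=True)
    def setup_and_teardown(self):
        # Setup: fail fast if the server cannot be started.
        if not start_server():
            raise Exception("Failed to start server")
        yield
        # Teardown
        stop_server()

    def test_engines_endpoint_responds(self):
        response = requests.get("http://localhost:3928/engines")
        assert response.status_code == 200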


22 changes: 0 additions & 22 deletions engine/e2e-test/test_api_engine_get.py

This file was deleted.

36 changes: 0 additions & 36 deletions engine/e2e-test/test_api_engine_install.py

This file was deleted.

2 changes: 1 addition & 1 deletion engine/e2e-test/test_api_engine_list.py
@@ -22,4 +22,4 @@ def setup_and_teardown(self):

def test_engines_list_api_run_successfully(self):
response = requests.get("http://localhost:3928/engines")
assert response.status_code == 200
assert response.status_code == 200