Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add logic to handle Python-based backends instead of platform handlers #303

Merged
merged 8 commits into from
Oct 7, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 0 additions & 7 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -309,13 +309,6 @@ install(
${INSTALL_CONFIGDIR}
)

install(
DIRECTORY
src/resources/platform_handlers
DESTINATION
${CMAKE_INSTALL_PREFIX}/backends/python
)

install(
FILES
src/resources/triton_python_backend_utils.py
Expand Down
66 changes: 20 additions & 46 deletions src/pb_stub.cc
Original file line number Diff line number Diff line change
Expand Up @@ -82,9 +82,10 @@ Stub::Instantiate(
const std::string& shm_region_name, const std::string& model_path,
const std::string& model_version, const std::string& triton_install_path,
bi::managed_external_buffer::handle_t ipc_control_handle,
const std::string& name, const std::string& platform)
const std::string& name, const std::string& python_runtime_model)
{
model_context_.Init(model_path, platform, triton_install_path, model_version);
model_context_.Init(
model_path, python_runtime_model, triton_install_path, model_version);
name_ = name;
health_mutex_ = nullptr;
initialized_ = false;
Expand Down Expand Up @@ -1659,57 +1660,29 @@ PYBIND11_EMBEDDED_MODULE(c_python_backend_utils, module)

void
ModelContext::Init(
const std::string& model_path, const std::string& platform,
const std::string& model_path, const std::string& runtime_modeldir,
const std::string& triton_install_path, const std::string& model_version)
{
bool python_model_found = false;
std::string platform_model_path;

if (platform != "NONE") {
platform_model_path =
triton_install_path + "/platform_handlers/" + platform + "/model.py";
// Check if model file exists in the path.
struct stat buffer;
if (stat(platform_model_path.c_str(), &buffer) == 0) {
// Use the Platform model for serving the model.
python_model_found = true;
type_ = ModelType::PLATFORM;
python_model_path_ = platform_model_path;
// Trimming the model name from the model path, the platform model
// will populate the expected default model file name into model_path_.
model_dir_ = model_path.substr(0, model_path.find_last_of("\\/"));
} else {
LOG_WARN << "Unable to find model(handler) \'" << platform_model_path
<< "\' for platform field \'" << platform << "\'";
}
}

if (!python_model_found) {
type_ = ModelType::DEFAULT;
if (runtime_modeldir != "DEFAULT") {
    // For python-based backends, the check that `model.py` exists in the
    // corresponding backend folder happens on the core side, so we can omit
    // that check here.
python_model_path_ = runtime_modeldir + "/model.py";
tanmayv25 marked this conversation as resolved.
Show resolved Hide resolved
type_ = ModelType::BACKEND;
} else {
python_model_path_ = model_path;
// Check if model file exists in this path.
struct stat buffer;
if (stat(python_model_path_.c_str(), &buffer) == 0) {
python_model_found = true;
type_ = ModelType::DEFAULT;
}
// Initializing here for consistency with platform model case.
model_dir_ = model_path.substr(0, model_path.find_last_of("\\/"));
}

if (!python_model_found) {
if (platform != "NONE") {
throw PythonBackendException(
("Python model file not found in neither \'" + platform_model_path +
"\' nor \'" + model_path + "\'"));
} else {
if (stat(python_model_path_.c_str(), &buffer) != 0) {
throw PythonBackendException(
("Python model file not found in \'" + model_path + "\'"));
}
}

model_dir_ = model_path.substr(0, model_path.find_last_of("\\/"));
Tabrizian marked this conversation as resolved.
Show resolved Hide resolved
python_backend_folder_ = triton_install_path;
model_version_ = model_version;
platform_ = platform;
runtime_modeldir_ = runtime_modeldir;
}

void
Expand Down Expand Up @@ -1740,9 +1713,10 @@ ModelContext::StubSetup(py::module& sys)
sys = py::module_::import(
(std::string(model_version_) + "." + model_name_trimmed).c_str());
} else {
std::string platform_model_dir(
python_backend_folder_ + "/platform_handlers/" + platform_ + "/");
sys.attr("path").attr("append")(platform_model_dir);
std::string model_path_parent =
python_model_path_.substr(0, python_model_path_.find_last_of("/"));
std::string backend_model_dir(model_path_parent);
sys.attr("path").attr("append")(backend_model_dir);
sys.attr("path").attr("append")(python_backend_folder_);
sys = py::module_::import(model_name_trimmed.c_str());
}
Expand Down Expand Up @@ -1791,14 +1765,14 @@ main(int argc, char** argv)
int64_t shm_growth_size = std::stol(argv[4]);
std::string triton_install_path = argv[6];
std::string name = argv[8];
std::string platform = argv[9];
std::string runtime_modeldir = argv[9];

std::unique_ptr<Stub>& stub = Stub::GetOrCreateInstance();
try {
stub->Instantiate(
shm_growth_size, shm_default_size, shm_region_name, model_path,
model_version, argv[6] /* triton install path */,
std::stoi(argv[7]) /* IPCControl handle */, name, platform);
std::stoi(argv[7]) /* IPCControl handle */, name, runtime_modeldir);
}
catch (const PythonBackendException& pb_exception) {
LOG_INFO << "Failed to preinitialize Python stub: " << pb_exception.what();
Expand Down
15 changes: 11 additions & 4 deletions src/pb_stub.h
Original file line number Diff line number Diff line change
Expand Up @@ -180,9 +180,15 @@ class ModelContext {
std::string model_dir_;
std::string model_version_;
std::string python_backend_folder_;
std::string platform_;

enum ModelType { DEFAULT, PLATFORM };
std::string runtime_modeldir_;

  // Triton supports python-based backends, i.e. backends that provide a
  // common `model.py`, which can be re-used between different models.
// between different models. `ModelType` helps to differentiate
// between models running with c++ python backend (ModelType::DEFAULT)
// and models running with python-based backend (ModelType::BACKEND)
// at the time of ModelContext::StubSetup to properly set up paths.
enum ModelType { DEFAULT, BACKEND };
tanmayv25 marked this conversation as resolved.
Show resolved Hide resolved
ModelType type_;
};

Expand Down Expand Up @@ -210,7 +216,8 @@ class Stub {
const std::string& shm_region_name, const std::string& model_path,
const std::string& model_version, const std::string& triton_install_path,
bi::managed_external_buffer::handle_t ipc_control_handle,
const std::string& model_instance_name, const std::string& platform);
const std::string& model_instance_name,
const std::string& runtime_modeldir);

/// Get the health of the stub process.
bool& Health();
Expand Down
67 changes: 56 additions & 11 deletions src/python_be.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1771,11 +1771,12 @@ ModelState::ModelState(TRITONBACKEND_Model* triton_model)
python_execution_env_ = "";
force_cpu_only_input_tensors_ = true;
decoupled_ = false;
platform_ = "";

void* bstate;
THROW_IF_BACKEND_MODEL_ERROR(TRITONBACKEND_BackendState(backend, &bstate));
backend_state_ = reinterpret_cast<BackendState*>(bstate);

runtime_modeldir_ = backend_state_->runtime_modeldir;
triton::common::TritonJson::Value params;
common::TritonJson::Value model_config;
if (model_config_.Find("parameters", &params)) {
Expand Down Expand Up @@ -1812,14 +1813,6 @@ ModelState::ModelState(TRITONBACKEND_Model* triton_model)
}
}

triton::common::TritonJson::Value platform;
if (model_config_.Find("platform", &platform)) {
auto error = platform.AsString(&platform_);
if (error != nullptr) {
throw BackendModelException(error);
}
}

  // Skip the FORCE_CPU_ONLY_INPUT_TENSORS variable if it doesn't exist.
std::string force_cpu_only_input_tensor;
error = nullptr;
Expand Down Expand Up @@ -1948,8 +1941,11 @@ TRITONBACKEND_Initialize(TRITONBACKEND_Backend* backend)
backend_state->shm_message_queue_size = 1000;
backend_state->number_of_instance_inits = 0;
backend_state->thread_pool_size = 32;
// Initialize shared memory region prefix to include backend's name
// to avoid collision between python backend and python-based backends.
backend_state->shared_memory_region_prefix =
"triton_python_backend_shm_region_";
"triton_" + name + "_backend_shm_region_";
tanmayv25 marked this conversation as resolved.
Show resolved Hide resolved
std::string default_backend_dir_string;

if (backend_config.Find("cmdline", &cmdline)) {
triton::common::TritonJson::Value shm_growth_size;
Expand Down Expand Up @@ -2059,6 +2055,12 @@ TRITONBACKEND_Initialize(TRITONBACKEND_Backend* backend)
return TRITONSERVER_ErrorNew(TRITONSERVER_ERROR_INVALID_ARG, ia.what());
}
}

triton::common::TritonJson::Value default_backend_dir;
if (cmdline.Find("backend-directory", &default_backend_dir)) {
RETURN_IF_ERROR(
default_backend_dir.AsString(&default_backend_dir_string));
}
}

LOG_MESSAGE(
Expand All @@ -2076,7 +2078,50 @@ TRITONBACKEND_Initialize(TRITONBACKEND_Backend* backend)
TRITONBACKEND_ArtifactType artifact_type;
RETURN_IF_ERROR(
TRITONBACKEND_BackendArtifacts(backend, &artifact_type, &location));
backend_state->python_lib = location;

// Check if `triton_python_backend_stub` and `triton_python_backend_utils.py`
// are located under `location`.
// DLIS-5596: Add forward slash to be platform agnostic
// (i.e. For Windows, we need to use backward slash).
std::string default_python_backend_dir =
tanmayv25 marked this conversation as resolved.
Show resolved Hide resolved
default_backend_dir_string + "/python";
std::string backend_stub_path =
std::string(location) + "/triton_python_backend_stub";
std::string backend_utils =
std::string(location) + "/triton_python_backend_utils.py";
  // Both the stub and the utils should be in the same location.
if (FileExists(backend_stub_path) && FileExists(backend_utils)) {
backend_state->python_lib = location;
    // If `location` is the default location of the python backend,
    // then we are using the default python backend.
if (default_python_backend_dir == std::string(location)) {
backend_state->runtime_modeldir = "";
} else {
      // If `location` is not the default location of the python backend,
      // then we are using a python-based backend, with its model.py stored
      // in the received location.
backend_state->runtime_modeldir = location;
}
} else {
    // If the stub and utils are not found in the received `location`,
    // then we are using a python-based backend, and the stub and utils are
    // stored in the default python backend location.
if (!default_backend_dir_string.empty()) {
std::string backend_stub_path =
default_backend_dir_string + "/python/triton_python_backend_stub";
if (!FileExists(backend_stub_path)) {
return TRITONSERVER_ErrorNew(
TRITONSERVER_ERROR_NOT_FOUND,
(std::string("triton_python_backend_stub") +
" is not found. Searched paths: " + default_backend_dir_string +
"/python and" + std::string(location))
.c_str());
}
}
backend_state->runtime_modeldir = location;
backend_state->python_lib = default_backend_dir_string + "/python";
}

backend_state->env_manager = std::make_unique<EnvironmentManager>();

RETURN_IF_ERROR(TRITONBACKEND_BackendSetState(
Expand Down
7 changes: 4 additions & 3 deletions src/python_be.h
Original file line number Diff line number Diff line change
Expand Up @@ -218,6 +218,7 @@ struct BackendState {
std::string shared_memory_region_prefix;
int64_t thread_pool_size;
std::unique_ptr<EnvironmentManager> env_manager;
std::string runtime_modeldir;
};

class ModelState : public BackendModel {
Expand All @@ -237,8 +238,8 @@ class ModelState : public BackendModel {
// Is decoupled API being used.
bool IsDecoupled() { return decoupled_; }

// Returns the value in the platform field
std::string Platform() { return platform_; }
// Returns the value in the `runtime_modeldir_` field
std::string RuntimeModelDir() { return runtime_modeldir_; }

// Launch auto-complete stub process.
TRITONSERVER_Error* LaunchAutoCompleteStubProcess();
Expand All @@ -255,7 +256,7 @@ class ModelState : public BackendModel {
std::string python_execution_env_;
bool force_cpu_only_input_tensors_;
bool decoupled_;
std::string platform_;
std::string runtime_modeldir_;
std::unique_ptr<StubLauncher> auto_complete_stub_;
};

Expand Down
Loading
Loading