Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Offline batch inference #1477

Closed
wants to merge 3 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 8 additions & 4 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -117,8 +117,10 @@ jobs:
python-version: 3.7
- name: Install test dependencies
run: ./ci/install_test_deps.sh
- name: Install fastai
run: python -m pip install fastai
- name: Run tests
run: ./ci/fastai_integration_tests.sh
run: ./ci/test_project.sh tests/integration/projects/fastai2
- name: Upload test coverage to Codecov
uses: codecov/[email protected]

Expand All @@ -141,7 +143,7 @@ jobs:
uses: codecov/[email protected]

api_server_integration_tests:
name: ${{ matrix.os }} API Server Integration Tests
name: API Server Integration Tests (${{ matrix.os }})
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
Expand All @@ -158,12 +160,14 @@ jobs:
- name: Install test dependencies
run: ./ci/install_test_deps.sh
- name: Run tests
run: ./ci/integration_tests.sh
run: ./ci/test_project.sh tests/integration/projects/general
- name: Run tests
run: ./ci/test_project.sh tests/integration/projects/general_non_batch
- name: Upload test coverage to Codecov
uses: codecov/[email protected]

back_compatibility_integration_tests:
name: ${{ matrix.os }} Back Compatibility Integration Tests
name: Backward Compatibility Integration Tests (${{ matrix.os }})
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
Expand Down
1 change: 1 addition & 0 deletions bentoml/adapters/base_input.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ class BaseInputAdapter:
def __init__(self, http_input_example=None, **base_config):
    """Store adapter configuration.

    A ``request_schema`` key in *base_config*, when present, supplies a
    user-defined OpenAPI request schema that callers can use instead of
    the adapter's built-in default.
    """
    self._http_input_example = http_input_example
    self._config = base_config
    self.custom_request_schema = base_config.get('request_schema')

@property
def config(self):
Expand Down
6 changes: 5 additions & 1 deletion bentoml/service/inference_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,11 @@ def request_schema(self):
"""
:return: the HTTP API request schema in OpenAPI/Swagger format
"""
schema = self.input_adapter.request_schema
if self.input_adapter.custom_request_schema is None:
schema = self.input_adapter.request_schema
else:
schema = self.input_adapter.custom_request_schema

if schema.get('application/json'):
schema.get('application/json')[
'example'
Expand Down
16 changes: 0 additions & 16 deletions ci/fastai_integration_tests.sh

This file was deleted.

19 changes: 3 additions & 16 deletions ci/integration_tests.sh → ci/test_project.sh
Original file line number Diff line number Diff line change
Expand Up @@ -9,28 +9,15 @@ GIT_ROOT=$(git rev-parse --show-toplevel)
cd "$GIT_ROOT" || exit

# Run test
PROJECT_PATH="$GIT_ROOT/tests/integration/projects/general"
PROJECT_PATH="$GIT_ROOT/$1"
BUILD_PATH="$PROJECT_PATH/build"
python "$PROJECT_PATH/model/model.py" "$BUILD_PATH/artifacts"
python "$PROJECT_PATH/service.py" "$BUILD_PATH/artifacts" "$BUILD_PATH/dist"
if [ "$(uname)" == "Darwin" ]; then
export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
python -m pytest -s "$PROJECT_PATH" --bento-dist "$BUILD_PATH/dist"
python -m pytest -s "$PROJECT_PATH" --bento-dist "$BUILD_PATH/dist" --cov=bentoml --cov-config=.coveragerc
else
python -m pytest -s "$PROJECT_PATH" --bento-dist "$BUILD_PATH/dist" --docker
fi
rm -r $BUILD_PATH

# test the non batch service
PROJECT_PATH="$GIT_ROOT/tests/integration/projects/general_non_batch"
BUILD_PATH="$PROJECT_PATH/build"
python "$PROJECT_PATH/model/model.py" "$BUILD_PATH/artifacts"
python "$PROJECT_PATH/service.py" "$BUILD_PATH/artifacts" "$BUILD_PATH/dist"
if [ "$(uname)" == "Darwin" ]; then
export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
python -m pytest -s "$PROJECT_PATH" --bento-dist "$BUILD_PATH/dist"
else
python -m pytest -s "$PROJECT_PATH" --bento-dist "$BUILD_PATH/dist" --docker
python -m pytest -s "$PROJECT_PATH" --bento-dist "$BUILD_PATH/dist" --docker --cov=bentoml --cov-config=.coveragerc
fi
rm -r $BUILD_PATH

Expand Down
107 changes: 71 additions & 36 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
import glob
import inspect
import os
import subprocess
from typing import Callable, List, Optional, Union

import imageio
import numpy as np
Expand All @@ -11,47 +13,80 @@
from tests.bento_service_examples.example_bento_service import ExampleBentoService


async def assert_http_request(
    method: str,
    url: str,
    headers: Optional[dict] = None,
    data: Optional[str] = None,
    timeout: Optional[int] = None,
    assert_status: Union[Callable, int, None] = None,
    assert_data: Union[Callable, bytes, None] = None,
):
    """Send one HTTP request asynchronously and assert on status/body.

    ``assert_status`` and ``assert_data`` may each be a literal value or a
    predicate callable over the status / raw body; a missing
    ``assert_status`` defaults to 200.
    """
    import aiohttp

    expected_status = 200 if assert_status is None else assert_status

    try:
        async with aiohttp.ClientSession() as session:
            async with session.request(
                method, url, data=data, headers=headers, timeout=timeout
            ) as resp:
                body = await resp.read()
    except RuntimeError:
        # The event loop was closed because a previous task failed; there is
        # nothing meaningful left to assert, so bail out quietly.
        return

    if callable(expected_status):
        assert expected_status(resp.status), f"{resp.status} {body}"
    else:
        assert resp.status == expected_status, f"{resp.status} {body}"

    if assert_data is None:
        return
    if callable(assert_data):
        assert assert_data(body), body
    else:
        assert body == assert_data


# Assert system command std output.
def assert_command(
    cmd: List[str],
    assert_out: Union[Callable, str, None] = None,
    assert_err: Union[Callable, str, None] = None,
    strip=True,
):
    """Run *cmd* and assert on its stdout/stderr.

    ``assert_out`` / ``assert_err`` may each be a literal string (compared
    after ``.strip()`` unless ``strip=False``) or a predicate callable over
    the raw decoded text.

    Fix: the previous implementation read stdout to EOF before touching
    stderr; a child that fills the stderr OS pipe buffer first would block
    forever (documented Popen pitfall). subprocess.run() drains both pipes
    concurrently via communicate(), avoiding the deadlock.
    """
    completed = subprocess.run(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=os.environ,
    )
    stdout = completed.stdout.decode('utf-8')
    stderr = completed.stderr.decode('utf-8')

    if assert_out is not None:
        if callable(assert_out):
            assert assert_out(stdout)
        elif strip:
            assert stdout.strip() == assert_out.strip()
        else:
            assert stdout == assert_out

    if assert_err is not None:
        if callable(assert_err):
            assert assert_err(stderr)
        elif strip:
            assert stderr.strip() == assert_err.strip()
        else:
            assert stderr == assert_err


def pytest_configure():
'''
global constants for tests
'''
# async request client
async def assert_request(
method,
url,
headers=None,
data=None,
timeout=None,
assert_status=None,
assert_data=None,
):
if assert_status is None:
assert_status = 200

import aiohttp

try:
async with aiohttp.ClientSession() as sess:
async with sess.request(
method, url, data=data, headers=headers, timeout=timeout
) as r:
r_body = await r.read()
except RuntimeError:
# the event loop has been closed due to previous task failed, ignore
return

if callable(assert_status):
assert assert_status(r.status), f"{r.status} {r_body}"
else:
assert r.status == assert_status, f"{r.status} {r_body}"

if assert_data is not None:
if callable(assert_data):
assert assert_data(r_body), r_body
else:
assert r_body == assert_data

pytest.assert_request = assert_request
pytest.assert_request = assert_http_request
pytest.assert_command = assert_command

# dataframe json orients
pytest.DF_ORIENTS = {
Expand Down
5 changes: 0 additions & 5 deletions tests/integration/fastai_utils.py

This file was deleted.

26 changes: 18 additions & 8 deletions tests/integration/projects/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,25 +24,35 @@ def clean_context():
yield stack


@pytest.fixture(scope="session")
def bundle(pytestconfig):
    """Resolve the saved-service bundle path: the --bento-dist option when
    given, otherwise <argv[1]>/build/dist."""
    dist = pytestconfig.getoption("bento_dist")
    if not dist:
        dist = os.path.join(sys.argv[1], "build", "dist")
    return dist


@pytest.fixture(params=[True, False], scope="module")
def enable_microbatch(request):
    """Parametrized micro-batching switch; also mirrored onto the pytest
    namespace so non-fixture helpers can read the current value."""
    flag = request.param
    pytest.enable_microbatch = flag
    return flag


@pytest.fixture(scope="module")
def host(pytestconfig, clean_context, enable_microbatch):
test_svc_bundle = pytestconfig.getoption("bento_dist") or os.path.join(
sys.argv[1], "build", "dist"
)
print(test_svc_bundle)

def host(pytestconfig, bundle, clean_context, enable_microbatch):
if pytestconfig.getoption("docker"):
image = clean_context.enter_context(
build_api_server_docker_image(test_svc_bundle, "example_service")
build_api_server_docker_image(bundle, "example_service")
)
with run_api_server_docker_container(image, enable_microbatch) as host:
yield host
else:
with run_api_server(test_svc_bundle, enable_microbatch) as host:
with run_api_server(bundle, enable_microbatch) as host:
yield host


@pytest.fixture(scope="module")
def service(bundle):
    """Load the saved BentoService back from the bundle directory."""
    import bentoml

    loaded = bentoml.load_from_dir(bundle)
    return loaded
Empty file.
55 changes: 55 additions & 0 deletions tests/integration/projects/fastai2/model/model.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
import pathlib
import sys

import numpy as np
import torch
from fastai.data.block import DataBlock
from fastai.learner import Learner
from fastai.torch_core import Module
from torch import nn


def get_items(_x):
    """Ignore the argument and return a constant 5x5 float32 block of ones
    (fixture data for the DataBlock below)."""
    return np.ones((5, 5), dtype=np.float32)


class Model(nn.Module):
    """Tiny linear model: a 5-to-1 dense layer, no bias, every weight fixed
    to 1 — so the forward pass simply sums its five inputs."""

    def __init__(self):
        super().__init__()
        self.fc = nn.Linear(5, 1, bias=False)
        # Deterministic weights make test predictions exactly reproducible.
        torch.nn.init.ones_(self.fc.weight)

    def forward(self, x):
        out = self.fc(x)
        return out


class Loss(Module):
    """No-op loss used only to satisfy the fastai Learner API in tests.

    The model output passes through untouched at every stage: the 'loss'
    itself, its activation, and its decoded form are all the identity.
    """

    # NOTE(review): presumably read by fastai's loss handling to skip
    # reduction of per-item values — confirm against the fastai version in CI.
    reduction = 'none'

    def forward(self, x, _y):
        # Target is ignored; the raw prediction doubles as the loss value.
        return x

    def activation(self, x):
        # Identity: no softmax/sigmoid applied to predictions.
        return x

    def decodes(self, x):
        # Identity: predictions are already in their final form.
        return x


def pack_models(path):
    """Build a trivial fastai Learner around Model/Loss and save it under
    *path* as a BentoML FastaiModelArtifact named "model"."""
    from bentoml.frameworks.fastai import FastaiModelArtifact

    block = DataBlock(get_items=get_items, get_y=np.sum)
    loaders = block.datasets(None).dataloaders()
    learner = Learner(loaders, Model(), Loss())

    artifact = FastaiModelArtifact("model")
    artifact.pack(learner).save(path)


if __name__ == "__main__":
    # CLI: model.py <artifacts_dir> — ensure the directory exists, then
    # pack the fastai model into it.
    target = sys.argv[1]
    pathlib.Path(target).mkdir(parents=True, exist_ok=True)
    pack_models(target)
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
import bentoml
import pathlib
import sys

import numpy as np

import bentoml
from bentoml.adapters import DataframeInput
from bentoml.frameworks.fastai import FastaiModelArtifact

Expand All @@ -13,3 +17,16 @@ def predict(self, df):
_, _, output = self.artifacts.model.predict(input_data)

return output.squeeze().item()


if __name__ == "__main__":
    # CLI: service.py <artifacts_dir> <bundle_out_dir>
    artifacts_dir = sys.argv[1]
    dist_dir = sys.argv[2]
    svc = FastaiClassifier()

    # Model/Loss must be importable so the pickled learner can be restored.
    from model.model import Loss, Model  # noqa # pylint: disable=unused-import

    svc.artifacts.load_all(artifacts_dir)

    pathlib.Path(dist_dir).mkdir(parents=True, exist_ok=True)
    svc.save_to_dir(dist_dir)
7 changes: 7 additions & 0 deletions tests/integration/projects/fastai2/tests/test_service.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
import pandas

test_df = pandas.DataFrame([[1] * 5])


def test_fastai2_artifact_pack(service):
assert service.predict(test_df) == 5.0, 'Run inference before saving'
17 changes: 17 additions & 0 deletions tests/integration/projects/general/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,23 @@ def predict_json(self, input_datas):
def customezed_route(self, input_datas):
return input_datas

# Hand-written OpenAPI request schema forwarded to the input adapter via
# JsonInput(request_schema=...) to exercise the custom-schema plumbing.
# NOTE(review): "type": "uuid" is not a standard OpenAPI type — the usual
# spelling is {"type": "string", "format": "uuid"}; confirm the test only
# checks verbatim passthrough before relying on this schema elsewhere.
CUSTOM_SCHEMA = {
    "application/json": {
        "schema": {
            "type": "object",
            "required": ["field1", "field2"],
            "properties": {
                "field1": {"type": "string"},
                "field2": {"type": "uuid"},
            },
        },
    }
}

@bentoml.api(input=JsonInput(request_schema=CUSTOM_SCHEMA), batch=True)
def customezed_schema(self, input_datas):
    # Echo endpoint: returns the parsed JSON inputs unchanged; it exists
    # only so tests can verify CUSTOM_SCHEMA surfaces in the API docs.
    return input_datas

@bentoml.api(input=JsonInput(), batch=True)
def predict_strict_json(self, input_datas, tasks: Sequence[InferenceTask] = None):
filtered_jsons = []
Expand Down
Loading