diff --git a/mlem/api/commands.py b/mlem/api/commands.py index b4226a72..29473ccd 100644 --- a/mlem/api/commands.py +++ b/mlem/api/commands.py @@ -4,7 +4,6 @@ import posixpath from typing import Any, Dict, Iterable, List, Optional, Type, Union -import numpy as np from fsspec import AbstractFileSystem from fsspec.implementations.local import LocalFileSystem @@ -34,10 +33,10 @@ MlemLink, MlemModel, MlemObject, + MlemPackager, ) -from mlem.pack import Packager -from mlem.runtime.client.base import BaseClient -from mlem.runtime.server.base import Server +from mlem.runtime.client import Client +from mlem.runtime.server import Server from mlem.ui import ( EMOJI_APPLY, EMOJI_COPY, @@ -91,8 +90,7 @@ def apply( batch_dataset = get_dataset_value(part, batch_size) for chunk in batch_dataset: preds = w.call_method(resolved_method, chunk.data) - res += [*preds] - res = [np.array(res)] + res += [*preds] # TODO: merge results else: res = [ w.call_method(resolved_method, get_dataset_value(part)) @@ -103,17 +101,12 @@ def apply( return res[0] return res if len(res) == 1: - return save( - res[0], output, repo=target_repo, external=external, index=index - ) - - raise NotImplementedError( - "Saving several input data objects is not implemented yet" - ) + res = res[0] + return save(res, output, repo=target_repo, external=external, index=index) def apply_remote( - client: Union[str, BaseClient], + client: Union[str, Client], *data: Union[str, MlemDataset, Any], method: str = None, output: str = None, @@ -124,7 +117,7 @@ def apply_remote( """Apply provided model against provided data Args: - client (BaseClient): The client to access methods of deployed model. + client (Client): The client to access methods of deployed model. data (Any): Input to the model. method (str, optional): Which model method to use. If None, use the only method model has. @@ -138,7 +131,7 @@ def apply_remote( Otherwise returns None. 
""" - client = ensure_mlem_object(BaseClient, client, **client_kwargs) + client = ensure_mlem_object(Client, client, **client_kwargs) if method is not None: try: resolved_method = getattr(client, method) @@ -154,11 +147,8 @@ def apply_remote( return res[0] return res if len(res) == 1: - return save(res[0], output, repo=target_repo, index=index) - - raise NotImplementedError( - "Saving several input data objects is not implemented yet" - ) + res = res[0] + return save(res, output, repo=target_repo, index=index) def clone( @@ -317,21 +307,21 @@ def link( def pack( - packager: Union[str, Packager], + packager: Union[str, MlemPackager], model: Union[str, MlemModel], **packager_kwargs, ): """Pack model in docker-build-ready folder or directly build a docker image. Args: - packager (Union[str, Packager]): Packager to use. + packager (Union[str, MlemPackager]): Packager to use. Out-of-the-box supported string values are "docker_dir" and "docker". model (Union[str, MlemModel]): The model to pack. """ model = get_model_meta(model) - return ensure_mlem_object(Packager, packager, **packager_kwargs).package( - model - ) + return ensure_mlem_object( + MlemPackager, packager, **packager_kwargs + ).package(model) def serve(model: MlemModel, server: Union[Server, str], **server_kwargs): @@ -341,7 +331,7 @@ def serve(model: MlemModel, server: Union[Server, str], **server_kwargs): model (MlemModel): The model to serve. server (Union[Server, str]): Out-of-the-box supported one is "fastapi". 
""" - from mlem.runtime.interface.base import ModelInterface + from mlem.runtime.interface import ModelInterface model.load_value() interface = ModelInterface(model_type=model.model_type) diff --git a/mlem/api/utils.py b/mlem/api/utils.py index 1b8e35c4..3b52d80b 100644 --- a/mlem/api/utils.py +++ b/mlem/api/utils.py @@ -9,7 +9,7 @@ def get_dataset_value(dataset: Any, batch_size: Optional[int] = None) -> Any: if isinstance(dataset, str): - return load(dataset) + return load(dataset, batch_size=batch_size) if isinstance(dataset, MlemDataset): # TODO: https://github.com/iterative/mlem/issues/29 # fix discrepancies between model and data meta objects diff --git a/mlem/cli/apply.py b/mlem/cli/apply.py index d4b473b1..45cb9060 100644 --- a/mlem/cli/apply.py +++ b/mlem/cli/apply.py @@ -26,7 +26,7 @@ from mlem.core.metadata import load_meta from mlem.core.objects import MlemDataset, MlemModel from mlem.ext import list_implementations -from mlem.runtime.client.base import BaseClient +from mlem.runtime.client import Client from mlem.ui import set_echo @@ -123,7 +123,7 @@ def apply( def apply_remote( subtype: str = Argument( "", - help=f"Type of client. Choices: {list_implementations(BaseClient)}", + help=f"Type of client. 
Choices: {list_implementations(Client)}", show_default=False, ), data: str = Argument(..., help="Path to dataset object"), @@ -146,7 +146,7 @@ def apply_remote( Apply hosted mlem model to local mlem dataset $ mlem apply-remote http mydataset -c host="0.0.0.0" -c port=8080 --output myprediction """ - client = config_arg(BaseClient, load, subtype, conf, file_conf) + client = config_arg(Client, load, subtype, conf, file_conf) with set_echo(None if json else ...): result = run_apply_remote( @@ -163,7 +163,7 @@ def apply_remote( def run_apply_remote( - client: BaseClient, + client: Client, data: str, repo, rev, diff --git a/mlem/cli/package.py b/mlem/cli/package.py index a4ce4ba6..a1091247 100644 --- a/mlem/cli/package.py +++ b/mlem/cli/package.py @@ -12,9 +12,8 @@ option_rev, ) from mlem.core.metadata import load_meta -from mlem.core.objects import MlemModel +from mlem.core.objects import MlemModel, MlemPackager from mlem.ext import list_implementations -from mlem.pack import Packager @mlem_command("pack", section="runtime") @@ -22,7 +21,7 @@ def pack( model: str = Argument(..., help="Path to model"), subtype: str = Argument( "", - help=f"Type of packing. Choices: {list_implementations(Packager)}", + help=f"Type of packing. 
Choices: {list_implementations(MlemPackager)}", show_default=False, ), repo: Optional[str] = option_repo, @@ -45,6 +44,6 @@ def pack( from mlem.api.commands import pack pack( - config_arg(Packager, load, subtype, conf, file_conf), + config_arg(MlemPackager, load, subtype, conf, file_conf), load_meta(model, repo, rev, force_type=MlemModel), ) diff --git a/mlem/cli/serve.py b/mlem/cli/serve.py index 75863969..c004c014 100644 --- a/mlem/cli/serve.py +++ b/mlem/cli/serve.py @@ -14,7 +14,7 @@ from mlem.core.metadata import load_meta from mlem.core.objects import MlemModel from mlem.ext import list_implementations -from mlem.runtime.server.base import Server +from mlem.runtime.server import Server @mlem_command("serve", section="runtime") diff --git a/mlem/config.py b/mlem/config.py index 173ad86b..159aa045 100644 --- a/mlem/config.py +++ b/mlem/config.py @@ -52,6 +52,8 @@ def inner(settings: BaseSettings) -> Dict[str, Any]: class MlemConfigBase(BaseSettings): + """Special base for mlem settings to be able to read them from files""" + config_path: str = "" config_fs: Optional[AbstractFileSystem] = None @@ -78,6 +80,8 @@ def customise_sources( class MlemConfig(MlemConfigBase): + """Base Mlem Config""" + GITHUB_USERNAME: Optional[str] = Field(default=None, env="GITHUB_USERNAME") GITHUB_TOKEN: Optional[str] = Field(default=None, env="GITHUB_TOKEN") ADDITIONAL_EXTENSIONS_RAW: str = Field( diff --git a/mlem/contrib/docker/base.py b/mlem/contrib/docker/base.py index 87936b07..58fe93ac 100644 --- a/mlem/contrib/docker/base.py +++ b/mlem/contrib/docker/base.py @@ -20,9 +20,8 @@ ) from mlem.core.base import MlemABC from mlem.core.errors import DeploymentError -from mlem.core.objects import MlemModel -from mlem.pack import Packager -from mlem.runtime.server.base import Server +from mlem.core.objects import MlemModel, MlemPackager +from mlem.runtime.server import Server from mlem.ui import EMOJI_BUILD, EMOJI_OK, EMOJI_UPLOAD, echo logger = logging.getLogger(__name__) @@ -302,7 
+301,7 @@ class _DockerPackMixin(BaseModel): args: DockerBuildArgs = DockerBuildArgs() -class DockerDirPackager(Packager, _DockerPackMixin): +class DockerDirPackager(MlemPackager, _DockerPackMixin): type: ClassVar[str] = "docker_dir" target: str @@ -318,7 +317,7 @@ def package(self, obj: MlemModel): return docker_dir -class DockerImagePackager(Packager, _DockerPackMixin): +class DockerImagePackager(MlemPackager, _DockerPackMixin): type: ClassVar[str] = "docker" image: DockerImage env: DockerEnv = DockerEnv() diff --git a/mlem/contrib/docker/context.py b/mlem/contrib/docker/context.py index a697988c..9f8ebb33 100644 --- a/mlem/contrib/docker/context.py +++ b/mlem/contrib/docker/context.py @@ -15,7 +15,7 @@ import mlem from mlem.core.objects import MlemModel from mlem.core.requirements import Requirements, UnixPackageRequirement -from mlem.runtime.server.base import Server +from mlem.runtime.server import Server from mlem.ui import EMOJI_BUILD, EMOJI_PACK, echo, no_echo from mlem.utils.module import get_python_version from mlem.utils.templates import TemplateModel diff --git a/mlem/contrib/docker/helpers.py b/mlem/contrib/docker/helpers.py index 2cceb069..658669b9 100644 --- a/mlem/contrib/docker/helpers.py +++ b/mlem/contrib/docker/helpers.py @@ -1,5 +1,5 @@ from mlem.core.objects import MlemModel -from mlem.runtime.server.base import Server +from mlem.runtime.server import Server from . 
import DockerImagePackager from .base import DockerBuildArgs, DockerEnv, DockerImage diff --git a/mlem/contrib/fastapi.py b/mlem/contrib/fastapi.py index bc2f4d9d..1d5fb6ec 100644 --- a/mlem/contrib/fastapi.py +++ b/mlem/contrib/fastapi.py @@ -11,8 +11,8 @@ from mlem.core.model import Signature from mlem.core.requirements import LibRequirementsMixin -from mlem.runtime.interface.base import Interface -from mlem.runtime.server.base import Server +from mlem.runtime.interface import Interface +from mlem.runtime.server import Server from mlem.ui import EMOJI_NAILS, echo logger = logging.getLogger(__name__) diff --git a/mlem/contrib/heroku/meta.py b/mlem/contrib/heroku/meta.py index 1750d288..f81406f1 100644 --- a/mlem/contrib/heroku/meta.py +++ b/mlem/contrib/heroku/meta.py @@ -4,7 +4,7 @@ from pydantic import BaseModel from mlem.core.objects import DeployState, DeployStatus, MlemDeploy, MlemEnv -from mlem.runtime.client.base import BaseClient, HTTPClient +from mlem.runtime.client import Client, HTTPClient from ...core.errors import DeploymentError from ...ui import EMOJI_OK, echo @@ -39,7 +39,7 @@ def ensured_app(self) -> HerokuAppMeta: raise ValueError("App is not created yet") return self.app - def get_client(self) -> BaseClient: + def get_client(self) -> Client: return HTTPClient( host=urlparse(self.ensured_app.web_url).netloc, port=80 ) diff --git a/mlem/contrib/pip/base.py b/mlem/contrib/pip/base.py index bb094243..33baacf8 100644 --- a/mlem/contrib/pip/base.py +++ b/mlem/contrib/pip/base.py @@ -11,9 +11,8 @@ import mlem from mlem.core.meta_io import get_fs, get_uri -from mlem.core.objects import MlemModel +from mlem.core.objects import MlemModel, MlemPackager from mlem.core.requirements import InstallableRequirement -from mlem.pack import Packager from mlem.ui import EMOJI_PACK, echo, no_echo from mlem.utils.module import get_python_version from mlem.utils.templates import TemplateModel @@ -77,7 +76,7 @@ def make_distr(self, obj: MlemModel, root: str, fs: 
AbstractFileSystem): ) -class PipPackager(Packager, PipMixin): +class PipPackager(MlemPackager, PipMixin): type: ClassVar = "pip" target: str @@ -86,7 +85,7 @@ def package(self, obj: MlemModel): self.make_distr(obj, root, fs) -class WhlPackager(Packager, PipMixin): +class WhlPackager(MlemPackager, PipMixin): type: ClassVar = "whl" target: str diff --git a/mlem/contrib/rabbitmq.py b/mlem/contrib/rabbitmq.py index 0fe6fdcf..14bea59f 100644 --- a/mlem/contrib/rabbitmq.py +++ b/mlem/contrib/rabbitmq.py @@ -10,9 +10,9 @@ from mlem.core.errors import MlemError from mlem.core.model import Signature from mlem.runtime import Interface -from mlem.runtime.client.base import BaseClient -from mlem.runtime.interface.base import InterfaceDescriptor -from mlem.runtime.server.base import Server +from mlem.runtime.client import Client +from mlem.runtime.interface import InterfaceDescriptor +from mlem.runtime.server import Server from mlem.ui import EMOJI_NAILS, echo REQUEST = "_request" @@ -95,7 +95,7 @@ def serve(self, interface: Interface): self.channel.start_consuming() -class RabbitMQClient(BaseClient, RabbitMQMixin): +class RabbitMQClient(Client, RabbitMQMixin): type: ClassVar = "rmq" timeout: float = 0 diff --git a/mlem/core/artifacts.py b/mlem/core/artifacts.py index c281f703..ee17d58a 100644 --- a/mlem/core/artifacts.py +++ b/mlem/core/artifacts.py @@ -28,7 +28,8 @@ class ArtifactInfo(TypedDict): class Artifact(MlemABC, ABC): - """""" + """Artifact represent a file in some storage. It can be opened for reading, + downloaded to local fs or removed.""" class Config: type_root = True @@ -95,6 +96,8 @@ def info(self): class FSSpecArtifact(Artifact): + """Represents a file stored in an fsspec filesystem""" + type: ClassVar = "fsspec" uri: str @@ -161,7 +164,8 @@ def relative_to(self, location: Location) -> "Artifact": class Storage(MlemABC, ABC): - """""" + """Storage represents a place where `Artifact`s can be stored. 
Storage can be + used to upload local file or open a path in this storage for writing""" class Config: type_root = True @@ -186,6 +190,8 @@ def open(self, path) -> Iterator[Tuple[IO, Artifact]]: class FSSpecStorage(Storage): + """Represents an fsspec filesystem""" + class Config: exclude = {"fs", "base_path"} arbitrary_types_allowed = True @@ -252,6 +258,8 @@ def from_fs_path(cls, fs: AbstractFileSystem, path: str): class LocalStorage(FSSpecStorage): + """Special case for local filesystem""" + type: ClassVar = "local" fs: AbstractFileSystem = LocalFileSystem() @@ -285,6 +293,8 @@ def open(self, path) -> Iterator[Tuple[IO, "LocalArtifact"]]: class LocalArtifact(FSSpecArtifact): + """Special case for local file""" + type: ClassVar = "local" def relative(self, fs: AbstractFileSystem, path: str) -> "FSSpecArtifact": diff --git a/mlem/core/base.py b/mlem/core/base.py index 2e337341..822979d4 100644 --- a/mlem/core/base.py +++ b/mlem/core/base.py @@ -69,7 +69,7 @@ def load_impl_ext( class MlemABC(PolyModel): """ Base class for all MLEM Python objects - which should be serialized and deserialized + that should be serializable and polymorphic """ abs_types: ClassVar[Dict[str, Type["MlemABC"]]] = {} @@ -163,12 +163,13 @@ def build_mlem_object( **kwargs, ): not_links, links = parse_links(model, str_conf or []) + if model.__is_root__: + kwargs[model.__config__.type_field] = subtype return build_model( model, str_conf=not_links, file_conf=file_conf, conf=conf, - **{model.__config__.type_field: subtype}, **kwargs, **links, ) diff --git a/mlem/core/dataset_type.py b/mlem/core/dataset_type.py index 50e861d5..a460079c 100644 --- a/mlem/core/dataset_type.py +++ b/mlem/core/dataset_type.py @@ -31,7 +31,7 @@ class DatasetType(ABC, MlemABC, WithRequirements): """ - Base class for dataset type metadata. 
+ Base class for dataset metadata """ class Config: @@ -50,7 +50,6 @@ def check_type(obj, exp_type, exc_type): @abstractmethod def get_requirements(self) -> Requirements: - """""" # TODO: https://github.com/iterative/mlem/issues/16 docs return get_object_requirements(self) @abstractmethod @@ -74,6 +73,8 @@ def create(cls, obj: Any, **kwargs): class DatasetSerializer(ABC): + """Base class for dataset-to-dict serialization logic""" + @abstractmethod def serialize(self, instance: Any) -> dict: raise NotImplementedError @@ -88,6 +89,8 @@ def get_model(self, prefix: str = "") -> Union[Type[BaseModel], type]: class UnspecifiedDatasetType(DatasetType, DatasetSerializer): + """Special dataset type for cases when it's not provided""" + type: ClassVar = "unspecified" def serialize(self, instance: object) -> dict: @@ -107,15 +110,17 @@ def get_model(self, prefix: str = "") -> Type[BaseModel]: class DatasetHook(Hook[DatasetType], ABC): - pass + """Base class for hooks to analyze dataset objects""" class DatasetAnalyzer(Analyzer): + """Analyzer for dataset objects""" + base_hook_class = DatasetHook class DatasetReader(MlemABC, ABC): - """""" + """Base class for defining logic to read datasets from a set of `Artifact`s""" class Config: type_root = True @@ -135,7 +140,8 @@ def read_batch( class DatasetWriter(MlemABC): - """""" + """Base class for defining logic to write datasets. 
Should produce a set of + `Artifact`s and a corresponding reader""" class Config: type_root = True @@ -229,20 +235,13 @@ def read_batch( class ListDatasetType(DatasetType, DatasetSerializer): """ - DatasetType for list type - for a list of elements with same types such as [1, 2, 3, 4, 5] + DatasetType for lists with elements of the same type such as [1, 2, 3, 4, 5] """ type: ClassVar[str] = "list" dtype: DatasetType size: Optional[int] - def is_list(self): - return True - - def list_size(self): - return self.size - def get_requirements(self) -> Requirements: return self.dtype.get_requirements() @@ -286,9 +285,9 @@ def write( res[str(i)] = art readers.append(elem_reader) - return ListReader( - dataset_type=dataset, readers=readers - ), flatdict.FlatterDict(res, delimiter="/") + return ListReader(dataset_type=dataset, readers=readers), dict( + flatdict.FlatterDict(res, delimiter="/") + ) class ListReader(DatasetReader): @@ -393,7 +392,7 @@ def write( return ( _TupleLikeDatasetReader(dataset_type=dataset, readers=readers), - flatdict.FlatterDict(res, delimiter="/"), + dict(flatdict.FlatterDict(res, delimiter="/")), ) @@ -419,8 +418,8 @@ def read_batch( class TupleLikeListDatasetType(_TupleLikeDatasetType): """ - DatasetType for tuple-like list type - can be a list of elements with different types such as [1, False, 3.2, "mlem", None] + DatasetType for list with separate type for each element + such as [1, False, 3.2, "mlem", None] """ actual_type: ClassVar = list @@ -461,9 +460,12 @@ def process(cls, obj, **kwargs) -> DatasetType: if not py_types.intersection( PrimitiveType.PRIMITIVES ): # py_types is guaranteed to be singleton set here - return TupleLikeListDatasetType( - items=[DatasetAnalyzer.analyze(o) for o in obj] - ) + items_types = [DatasetAnalyzer.analyze(o) for o in obj] + first, *others = items_types + for other in others: + if first != other: + return TupleLikeListDatasetType(items=items_types) + return ListDatasetType(dtype=first, size=len(obj)) # 
optimization for large lists of same primitive type elements return ListDatasetType( @@ -473,7 +475,7 @@ def process(cls, obj, **kwargs) -> DatasetType: class DictDatasetType(DatasetType, DatasetSerializer, DatasetHook): """ - DatasetType for dict type + DatasetType for dict """ type: ClassVar[str] = "dict" @@ -553,9 +555,9 @@ def write( ) res[key] = art readers[key] = dtype_reader - return DictReader( - dataset_type=dataset, item_readers=readers - ), flatdict.FlatterDict(res, delimiter="/") + return DictReader(dataset_type=dataset, item_readers=readers), dict( + flatdict.FlatterDict(res, delimiter="/") + ) class DictReader(DatasetReader): diff --git a/mlem/core/hooks.py b/mlem/core/hooks.py index 87599700..ced9b860 100644 --- a/mlem/core/hooks.py +++ b/mlem/core/hooks.py @@ -23,7 +23,8 @@ class Hook(ABC, Generic[T]): """ - Base class for Hooks + Base class for Hooks. + Hooks are used by corresponding `Analyzer` to analyze and process objects """ priority = 0 @@ -80,71 +81,11 @@ def is_object_valid(cls, obj: Any) -> bool: return isinstance(obj, cls.valid_types) -# # noinspection PyAbstractClass -# class CanIsAMustHookMixin(Hook): -# """ -# Mixin for cases when can_process equals to must_process -# """ -# -# def can_process(self, obj) -> bool: -# """Returns same as :meth:`Hook.must_process`""" -# return self.must_process(obj) -# -# -# # noinspection PyAbstractClass -# class TypeHookMixin(CanIsAMustHookMixin): -# """ -# Mixin for cases when hook must process objects of certain types -# """ -# valid_types: List[Type] = None -# -# def must_process(self, obj) -> bool: -# """Returns True if obj is instance of one of valid types""" -# return any(isinstance(obj, t) for t in self.valid_types) -# -# -# class BaseModuleHookMixin(CanIsAMustHookMixin, Hook): -# """ -# Mixin for cases when hook must process all objects with certain base modules -# """ -# -# @abstractmethod -# def is_valid_base_module_name(self, module_name: str) -> bool: -# """ -# Must return True if module_name 
is valid for this hook -# -# :param module_name: module name -# :return: True or False -# """ -# pass  # pragma: no cover -# -# def is_valid_base_module(self, base_module: ModuleType) -> bool: -# """ -# Returns True if module is valid -# -# :param base_module: module object -# :return: True or False -# """ -# if base_module is None: -# return False -# return self.is_valid_base_module_name(base_module.__name__) -# -# def must_process(self, obj): -# """Returns True if obj has valid base module""" -# return self.is_valid_base_module(get_object_base_module(obj)) -# -# -# class LibHookMixin(BaseModuleHookMixin): -# """ -# Mixin for cases when hook must process all objects with certain base module -# """ -# base_module_name = None -# -# def is_valid_base_module_name(self, base_module: str) -> bool: -# return base_module == self.base_module_name - - class Analyzer(Generic[T]): + """Base class for analyzers. + Analyzers hold list of corresponding hooks. The `analyze` method goes through + all the hooks to find a valid one and uses it to `process` the object""" + base_hook_class: Type[Hook[T]] hooks: List[Type[Hook[T]]] diff --git a/mlem/core/import_objects.py b/mlem/core/import_objects.py index 0aa49fd0..b7f2e318 100644 --- a/mlem/core/import_objects.py +++ b/mlem/core/import_objects.py @@ -13,7 +13,10 @@ class ImportHook(Hook[MlemObject], MlemABC, ABC): - """""" + """Base class for defining import hooks. + On every import attempt all available hooks are checked if the imported path + represented by `Location` instance is valid for them.
Then process method is + called on a hook that first passed the check""" type: ClassVar[str] abs_name: ClassVar = "import" @@ -51,6 +54,8 @@ def analyze( # pylint: disable=arguments-differ # so what class ExtImportHook(ImportHook, ABC): + """Base class for import hooks that target particular file extensions""" + EXTS: ClassVar[Tuple[str, ...]] @classmethod @@ -59,6 +64,8 @@ def is_object_valid(cls, obj: Location) -> bool: class PickleImportHook(ExtImportHook): + """Import hook for pickle files""" + EXTS: ClassVar = (".pkl", ".pickle") type: ClassVar = "pickle" diff --git a/mlem/core/index.py b/mlem/core/index.py index 7077b06a..9d4b268d 100644 --- a/mlem/core/index.py +++ b/mlem/core/index.py @@ -18,6 +18,8 @@ class Index(MlemABC): + """Base class for mlem object indexing logic""" + class Config: type_root = True @@ -52,6 +54,9 @@ def parse_type_filter(type_filter: TypeFilter) -> Set[Type[MlemObject]]: class LinkIndex(Index): + """Indexing based on contents of MLEM_DIR - either objects or links to them + should be there""" + type: ClassVar = "link" def index(self, obj: MlemObject, location: Location): @@ -117,6 +122,8 @@ def list( class FileIndex(Index): + """Index as a single file""" + type: ClassVar = "file" filename = "index.yaml" diff --git a/mlem/core/meta_io.py b/mlem/core/meta_io.py index 3fb00332..663c45c4 100644 --- a/mlem/core/meta_io.py +++ b/mlem/core/meta_io.py @@ -80,6 +80,9 @@ def uri_repr(self): class UriResolver(ABC): + """Base class for resolving location.
Turns (path, repo, rev, fs) tuple + into a normalized `Location` instance""" + impls: List[Type["UriResolver"]] = [] versioning_support: bool = False @@ -210,6 +213,8 @@ def get_repo_uri( # pylint: disable=unused-argument class GithubResolver(UriResolver): + """Resolve https://github.com URLs""" + PROTOCOL = "github://" GITHUB_COM = "https://github.com" @@ -307,6 +312,8 @@ def get_repo_uri( class FSSpecResolver(UriResolver): + """Resolve different fsspec URIs""" + @classmethod def check( cls, diff --git a/mlem/core/model.py b/mlem/core/model.py index 77e2b92a..24c1bfab 100644 --- a/mlem/core/model.py +++ b/mlem/core/model.py @@ -32,9 +32,7 @@ class ModelIO(MlemABC): - """ - IO base class for models - """ + """Base class for model IO. Represents a way to save and load model files""" class Config: type_root = True @@ -45,7 +43,7 @@ class Config: @abstractmethod def dump(self, storage: Storage, path, model) -> Artifacts: """ """ - raise NotImplementedError() + raise NotImplementedError @abstractmethod def load(self, artifacts: Artifacts): @@ -53,10 +51,12 @@ def load(self, artifacts: Artifacts): Must load and return model :return: model object """ - raise NotImplementedError() + raise NotImplementedError class SimplePickleIO(ModelIO): + """IO with simple pickling of python model object""" + type: ClassVar[str] = "simple_pickle" def dump(self, storage: Storage, path: str, model) -> Artifacts: @@ -72,6 +72,8 @@ def load(self, artifacts: Artifacts): class Argument(BaseModel): + """Function argument descriptor""" + name: str type_: DatasetType required: bool = True @@ -114,7 +116,8 @@ def compose_args( skip_first: bool = False, auto_infer: bool = False, **call_kwargs, -): +) -> List[Argument]: + """Create a list of `Argument`s from argspec""" args_defaults = dict( zip( reversed(argspec.args or ()), @@ -142,6 +145,8 @@ def compose_args( class Signature(BaseModel, WithRequirements): + """Function signature descriptor""" + name: str args: List[Argument] 
returns: DatasetType @@ -192,7 +197,7 @@ def get_requirements(self): class ModelType(ABC, MlemABC, WithRequirements): """ - Base class for dataset type metadata. + Base class for model metadata. """ class Config: @@ -272,6 +277,8 @@ def get_requirements(self) -> Requirements: class ModelHook(Hook[ModelType], ABC): + """Base class for hooks to analyze model objects""" + valid_types: ClassVar[Optional[Tuple[Type, ...]]] = None @classmethod @@ -283,6 +290,8 @@ def process( # pylint: disable=arguments-differ # so what class ModelAnalyzer(Analyzer[ModelType]): + """Analyzer for model objects""" + base_hook_class = ModelHook hooks: List[Type[ModelHook]] # type: ignore diff --git a/mlem/core/objects.py b/mlem/core/objects.py index 000cb71e..08adac80 100644 --- a/mlem/core/objects.py +++ b/mlem/core/objects.py @@ -64,7 +64,10 @@ class MlemObject(MlemABC): - """""" + """Base class for MLEM objects. + MLEM objects contain metadata about different types of objects and are saved + in a form of `.mlem` files. 
+ """ class Config: exclude = {"location"} @@ -347,6 +350,9 @@ def meta_hash(self): class MlemLink(MlemObject): + """Link is a special MlemObject that represents a MlemObject in a different + location""" + path: str repo: Optional[str] = None rev: Optional[str] = None @@ -433,6 +439,8 @@ def from_location( class _WithArtifacts(ABC, MlemObject): + """Special subtype of MlemObject that can have files (artifacts) attached""" + __abstract__: ClassVar[bool] = True artifacts: Optional[Artifacts] = None requirements: Requirements = Requirements.new() @@ -574,6 +582,8 @@ def checkenv(self): class MlemModel(_WithArtifacts): + """MlemObject representing a ML model""" + object_type: ClassVar = "model" model_type_cache: Any model_type: ModelType @@ -625,6 +635,8 @@ def __getattr__(self, item): class MlemDataset(_WithArtifacts): + """MlemObject representing a dataset""" + class Config: exclude = {"dataset"} @@ -682,15 +694,34 @@ def load_value(self): self.dataset = self.reader.read(self.relative_artifacts) def read_batch(self, batch_size: int) -> Iterator[DatasetType]: - assert isinstance(self.reader, DatasetReader) + if self.reader is None: + raise MlemObjectNotSavedError( + "Cannot read batch from not saved dataset" + ) return self.reader.read_batch(self.relative_artifacts, batch_size) def get_value(self): return self.data +class MlemPackager(MlemObject): + """Packager is base class to define different ways of packaging models + into different formats""" + + class Config: + type_root = True + type_field = "type" + + object_type: ClassVar = "packager" + abs_name: ClassVar[str] = "packager" + + @abstractmethod + def package(self, obj: MlemModel): # TODO maybe we can also pack datasets? 
+ raise NotImplementedError + + class DeployState(MlemABC): - """""" + """Base class for deployment state metadata""" class Config: type_root = True @@ -708,7 +739,7 @@ def get_client(self): class MlemEnv(MlemObject, Generic[DT]): - """""" + """Base class for target environment metadata""" class Config: type_root = True @@ -721,12 +752,10 @@ class Config: @abstractmethod def deploy(self, meta: DT): - """""" raise NotImplementedError @abstractmethod def destroy(self, meta: DT): - """""" raise NotImplementedError @abstractmethod @@ -741,6 +770,8 @@ def check_type(self, deploy: "MlemDeploy"): class DeployStatus(str, Enum): + """Enum with deployment statuses""" + UNKNOWN = "unknown" NOT_DEPLOYED = "not_deployed" STARTING = "starting" @@ -750,7 +781,7 @@ class DeployStatus(str, Enum): class MlemDeploy(MlemObject): - """""" + """Base class for deployment metadata""" object_type: ClassVar = "deployment" @@ -849,7 +880,8 @@ def replace_model(self, model: MlemModel): def find_object( path: str, fs: AbstractFileSystem, repo: str = None ) -> Tuple[str, str]: - """assumes .mlem/ content is valid""" + """Extract object_type and path from path. 
+ assumes .mlem/ content is valid""" if repo is None: repo = find_repo_root(path, fs) if repo is not None and path.startswith(repo): diff --git a/mlem/core/requirements.py b/mlem/core/requirements.py index 00cbbb91..a3af94aa 100644 --- a/mlem/core/requirements.py +++ b/mlem/core/requirements.py @@ -44,7 +44,7 @@ class Requirement(MlemABC): """ - Base class for python requirement + Base class for requirement """ class Config: @@ -264,6 +264,8 @@ def to_sources_dict(self) -> Dict[str, bytes]: class FileRequirement(CustomRequirement): + """Represents an additional file""" + type: ClassVar[str] = "file" is_package: bool = False module: str = "" @@ -285,6 +287,8 @@ def from_path(cls, path: str): class UnixPackageRequirement(Requirement): + """Represents a unix package that needs to be installed""" + type: ClassVar[str] = "unix" package_name: str @@ -518,6 +522,8 @@ def resolve_requirements(other: "AnyRequirements") -> Requirements: class WithRequirements: + """A mixin for objects that should provide their requirements""" + def get_requirements(self) -> Requirements: from mlem.utils.module import get_object_requirements @@ -526,7 +532,7 @@ def get_requirements(self) -> Requirements: class LibRequirementsMixin(WithRequirements): """ - :class:`.DatasetType` mixin which provides requirements list consisting of + Mixin which provides requirements list consisting of PIP packages represented by module objects in `libraries` field. 
""" @@ -539,6 +545,8 @@ def get_requirements(self) -> Requirements: class RequirementsHook(Hook[Requirements], ABC): + """Hook for enriching requirements list""" + @classmethod @abstractmethod def is_object_valid(cls, obj: Requirement) -> bool: @@ -551,6 +559,8 @@ def process(cls, obj: Requirement, **kwargs) -> Requirements: class AddRequirementHook(RequirementsHook, ABC): + """Abstract hook to add new requirements""" + to_add: AnyRequirements = [] @classmethod @@ -559,6 +569,8 @@ def process(cls, obj: Requirement, **kwargs) -> Requirements: class RequirementsAnalyzer(Analyzer[Requirements]): + """Analyzer to enrich requirements""" + base_hook_class = RequirementsHook diff --git a/mlem/deploy/__init__.py b/mlem/deploy/__init__.py deleted file mode 100644 index 66fc4723..00000000 --- a/mlem/deploy/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -MLEM's deployment functionality -""" diff --git a/mlem/ext.py b/mlem/ext.py index a0d1bbcb..43bffe4d 100644 --- a/mlem/ext.py +++ b/mlem/ext.py @@ -99,8 +99,6 @@ class ExtensionLoader: Extension("mlem.contrib.catboost", ["catboost"], False), # Extension('mlem.contrib.aiohttp', ['aiohttp', 'aiohttp_swagger']), # Extension('mlem.contrib.flask', ['flask', 'flasgger'], False), - # Extension('mlem.contrib.sqlalchemy', ['sqlalchemy']), - # Extension('mlem.contrib.s3', ['boto3']), # Extension('mlem.contrib.imageio', ['imageio']), Extension("mlem.contrib.lightgbm", ["lightgbm"], False), Extension("mlem.contrib.xgboost", ["xgboost"], False), diff --git a/mlem/pack/__init__.py b/mlem/pack/__init__.py deleted file mode 100644 index af5873a1..00000000 --- a/mlem/pack/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -""" -Packing models to different output formats, such as docker images -""" -from .base import Packager - -# from .docker_dir import DockerDirPackager - -__all__ = ["Packager"] diff --git a/mlem/pack/base.py b/mlem/pack/base.py deleted file mode 100644 index 7df6c47e..00000000 --- a/mlem/pack/base.py +++ /dev/null @@ -1,19 +0,0 @@ 
-from abc import abstractmethod -from typing import ClassVar - -from mlem.core.objects import MlemModel, MlemObject - - -class Packager(MlemObject): - """""" - - class Config: - type_root = True - type_field = "type" - - object_type: ClassVar = "packager" - abs_name: ClassVar[str] = "packager" - - @abstractmethod - def package(self, obj: MlemModel): # TODO maybe we can also pack datasets? - raise NotImplementedError diff --git a/mlem/polydantic/core.py b/mlem/polydantic/core.py index 2926f620..6f2b8910 100644 --- a/mlem/polydantic/core.py +++ b/mlem/polydantic/core.py @@ -67,7 +67,7 @@ def validate(cls, value): """Polymorphic magic goes here""" if isinstance(value, cls): return value - if not cls.__is_root__: + if not cls.__is_root__ and cls.__config__.type_field not in value: return super().validate(value) if isinstance(value, str): value = {cls.__config__.type_field: value} diff --git a/mlem/runtime/__init__.py b/mlem/runtime/__init__.py index 7dbd66ec..2166e0e0 100644 --- a/mlem/runtime/__init__.py +++ b/mlem/runtime/__init__.py @@ -1,3 +1,3 @@ -from .interface.base import Interface, Signature +from .interface import Interface, Signature __all__ = ["Interface", "Signature"] diff --git a/mlem/runtime/client/base.py b/mlem/runtime/client.py similarity index 92% rename from mlem/runtime/client/base.py rename to mlem/runtime/client.py index 310343ec..29b8b9f3 100644 --- a/mlem/runtime/client/base.py +++ b/mlem/runtime/client.py @@ -8,13 +8,15 @@ from mlem.core.base import MlemABC from mlem.core.errors import MlemError, WrongMethodError from mlem.core.model import Signature -from mlem.runtime.interface.base import ExecutionError, InterfaceDescriptor +from mlem.runtime.interface import ExecutionError, InterfaceDescriptor logger = logging.getLogger(__name__) -class BaseClient(MlemABC, ABC): - """""" +class Client(MlemABC, ABC): + """Client is a way to invoke methods on running `Server` instance. 
+ `Client`s dynamically define python methods based on interfaces + exposed by `Server`""" class Config: type_root = True @@ -86,7 +88,7 @@ def __call__(self, *args, **kwargs): return self.method.returns.get_serializer().deserialize(out) -class HTTPClient(BaseClient): +class HTTPClient(Client): type: ClassVar[str] = "http" host: str = "0.0.0.0" port: Optional[int] = 8080 diff --git a/mlem/runtime/client/__init__.py b/mlem/runtime/client/__init__.py deleted file mode 100644 index 7897dc1d..00000000 --- a/mlem/runtime/client/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -Abstract classes that define required client functionality -""" diff --git a/mlem/runtime/interface/base.py b/mlem/runtime/interface.py similarity index 94% rename from mlem/runtime/interface/base.py rename to mlem/runtime/interface.py index 1e6b4f6a..7da777ad 100644 --- a/mlem/runtime/interface/base.py +++ b/mlem/runtime/interface.py @@ -24,7 +24,10 @@ class InterfaceDescriptor(BaseModel): class Interface(ABC, MlemABC): - """""" + """Base class for runtime interfaces. + Describes a set of methods together with their signatures (arguments + and return type) and executors - actual python callables to be run + when the method is invoked.
Used to setup `Server`""" class Config: type_root = True @@ -129,6 +132,9 @@ def expose(f): class SimpleInterface(Interface): + """Interface that exposes its own methods that are marked with `expose` + decorator""" + type: ClassVar[str] = "simple" methods: InterfaceDescriptor = InterfaceDescriptor() @@ -162,6 +168,8 @@ def get_method_signature(self, method_name: str) -> Signature: class ModelInterface(Interface): + """Interface that describes model methods""" + class Config: exclude = {"model_type"} diff --git a/mlem/runtime/interface/__init__.py b/mlem/runtime/interface/__init__.py deleted file mode 100644 index e7151657..00000000 --- a/mlem/runtime/interface/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -Abstract classes that define required Interface for serving models -""" diff --git a/mlem/runtime/server/base.py b/mlem/runtime/server.py similarity index 77% rename from mlem/runtime/server/base.py rename to mlem/runtime/server.py index 80c4e57f..bd5bd906 100644 --- a/mlem/runtime/server/base.py +++ b/mlem/runtime/server.py @@ -4,11 +4,13 @@ from mlem.core.base import MlemABC from mlem.core.requirements import WithRequirements -from mlem.runtime.interface.base import Interface +from mlem.runtime.interface import Interface class Server(MlemABC, ABC, WithRequirements): - """""" + """Base class for defining serving logic.
Server's serve method accepts + an instance of `Interface` and should expose all of its methods via some + protocol""" class Config: type_root = True @@ -20,7 +22,7 @@ class Config: @abstractmethod def serve(self, interface: Interface): - raise NotImplementedError() + raise NotImplementedError def get_env_vars(self) -> Dict[str, str]: return self.env_vars or {} diff --git a/mlem/runtime/server/__init__.py b/mlem/runtime/server/__init__.py deleted file mode 100644 index 41149d07..00000000 --- a/mlem/runtime/server/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -Abstract classes for model serving functionality -""" diff --git a/mlem/utils/github.py b/mlem/utils/github.py index 42ef3b8c..c76b7b8a 100644 --- a/mlem/utils/github.py +++ b/mlem/utils/github.py @@ -52,6 +52,7 @@ def get_github_envs() -> Dict: def ls_branches(repo_url: str) -> Dict[str, str]: + """List branches in remote git repo""" import git git.cmd.Git().ls_remote(repo_url) @@ -65,14 +66,17 @@ def ls_branches(repo_url: str) -> Dict[str, str]: def ls_github_branches(org: str, repo: str): + """List branches in github repo""" return _ls_github_refs(org, repo, "branches") def ls_github_tags(org: str, repo: str): + """List tags in github repo""" return _ls_github_refs(org, repo, "tags") def github_check_rev(org: str, repo: str, rev: str): + """Check that rev exists in a github repo""" res = requests.head( f"https://api.github.com/repos/{org}/{repo}/commits/{rev}", auth=(CONFIG.GITHUB_USERNAME, CONFIG.GITHUB_TOKEN), # type: ignore diff --git a/mlem/utils/importing.py b/mlem/utils/importing.py index c8232e01..e1671c0d 100644 --- a/mlem/utils/importing.py +++ b/mlem/utils/importing.py @@ -4,6 +4,7 @@ def import_from_path(name: str, path: str): + """Import module from local path""" spec = importlib.util.spec_from_file_location(name, path) if spec is None: raise ImportError(f"Cannot import spec from {path}") @@ -14,6 +15,7 @@ def import_from_path(name: str, path: str): def import_string(path): + """Import object
from dotted path (..)""" split = path.split(".") module_name, object_name = ".".join(split[:-1]), split[-1] mod = import_module(module_name) @@ -26,6 +28,7 @@ def import_string(path): def module_importable(module_name): + """Check if module is importable (by importing it xD)""" try: import_module(module_name) return True diff --git a/mlem/utils/module.py b/mlem/utils/module.py index f755db2b..a59ae1ab 100644 --- a/mlem/utils/module.py +++ b/mlem/utils/module.py @@ -10,7 +10,7 @@ from functools import lru_cache, wraps from pickle import PickleError from types import FunctionType, LambdaType, MethodType, ModuleType -from typing import Dict, List, Optional, Set, Union +from typing import Dict, List, Optional, Set, Tuple, Union import dill import requests @@ -35,20 +35,6 @@ IGNORE_TYPES_REQ = (type(Requirements._abc_impl),) # type: ignore -def analyze_module_imports(module_path): - module = importing.import_module(module_path) - requirements = set() - for _name, obj in module.__dict__.items(): - if isinstance(obj, ModuleType): - mod = obj - else: - mod = get_object_base_module(obj) - if is_installable_module(mod) and not is_private_module(mod): - requirements.add(get_module_repr(mod)) - - return requirements - - def check_pypi_module( module_name, module_version=None, raise_on_error=False, warn_on_error=True ): @@ -317,17 +303,18 @@ def get_module_as_requirement( return InstallableRequirement(module=mod.__name__, version=mod_version) -def get_local_module_reqs(mod): +def get_local_module_reqs(mod) -> List[ModuleType]: + """Parses module AST to find all import statements""" tree = ast.parse(inspect.getsource(mod)) - imports = [] + imports: List[Tuple[str, Optional[str]]] = [] for statement in tree.body: if isinstance(statement, ast.Import): imports += [(n.name, None) for n in statement.names] elif isinstance(statement, ast.ImportFrom): if statement.level == 0: - imp = (statement.module, None) + imp = (statement.module or "", None) else: - imp = ("." 
+ statement.module, mod.__package__) + imp = ("." + (statement.module or ""), mod.__package__) imports.append(imp) result = [importing.import_module(i, p) for i, p in imports] @@ -360,6 +347,8 @@ def lstrip_lines(lines: Union[str, List[str]], check=True) -> str: class ImportFromVisitor(ast.NodeVisitor): + """Visitor implementation to find requirements""" + def __init__(self, pickler: "RequirementAnalyzer", obj): self.obj = obj self.pickler = pickler @@ -389,6 +378,9 @@ def visit_Name(self, node: ast.Name): # noqa def add_closure_inspection(f): + """Adds inspection logic for function-like objects to get requirements + from closure vars""" + @wraps(f) def wrapper(pickler: "RequirementAnalyzer", obj): base_module = get_object_base_module(obj) @@ -433,6 +425,7 @@ def wrapper(pickler: "RequirementAnalyzer", obj): def save_type_with_classvars(pickler: "RequirementAnalyzer", obj): + """Add requirement inspection for classvars""" for name, attr in obj.__dict__.items(): if name.startswith("__") and name.endswith("__"): continue @@ -448,6 +441,9 @@ def save_type_with_classvars(pickler: "RequirementAnalyzer", obj): class RequirementAnalyzer(dill.Pickler): + """Special pickler implementation that collects requirements while pickling + (and not pickling actually)""" + ignoring = ( "dill", "mlem", diff --git a/mlem/utils/path.py b/mlem/utils/path.py index 418d056e..c8e256f5 100644 --- a/mlem/utils/path.py +++ b/mlem/utils/path.py @@ -2,6 +2,7 @@ def make_posix(path: Optional[str]): + """Turn windows path into posix""" if not path: return path return path.replace("\\", "/") diff --git a/mlem/utils/root.py b/mlem/utils/root.py index 89563dcc..ee4ad56c 100644 --- a/mlem/utils/root.py +++ b/mlem/utils/root.py @@ -13,6 +13,7 @@ def mlem_repo_exists( path: str, fs: AbstractFileSystem, raise_on_missing: bool = False ): + """Check if mlem repo exists at path""" try: exists = fs.exists(posixpath.join(path, MLEM_DIR)) except ValueError: diff --git a/mlem/utils/templates.py
b/mlem/utils/templates.py index e978dcdb..2ffe4caa 100644 --- a/mlem/utils/templates.py +++ b/mlem/utils/templates.py @@ -7,6 +7,8 @@ class TemplateModel(BaseModel): + """Base class to render jinja templates from pydantic models""" + TEMPLATE_FILE: ClassVar[str] TEMPLATE_DIR: ClassVar[str] diff --git a/tests/api/test_commands.py b/tests/api/test_commands.py index 66a2812b..aa1652d0 100644 --- a/tests/api/test_commands.py +++ b/tests/api/test_commands.py @@ -17,7 +17,7 @@ from mlem.core.metadata import load from mlem.core.model import ModelIO from mlem.core.objects import MlemDataset, MlemLink, MlemModel -from mlem.runtime.client.base import HTTPClient +from mlem.runtime.client import HTTPClient from mlem.utils.path import make_posix from tests.conftest import MLEM_TEST_REPO, long, need_test_repo_auth diff --git a/tests/cli/test_apply.py b/tests/cli/test_apply.py index cae634c3..4589d0c3 100644 --- a/tests/cli/test_apply.py +++ b/tests/cli/test_apply.py @@ -9,8 +9,11 @@ from sklearn.tree import DecisionTreeClassifier from mlem.api import load, save +from mlem.core.dataset_type import ListDatasetType from mlem.core.errors import MlemRootNotFound -from mlem.runtime.client.base import HTTPClient +from mlem.core.metadata import load_meta +from mlem.core.objects import MlemDataset +from mlem.runtime.client import HTTPClient from tests.conftest import MLEM_TEST_REPO, long, need_test_repo_auth @@ -69,6 +72,7 @@ def test_apply_batch(runner, model_path_batch, data_path_batch): path = posixpath.join(dir, "data") result = runner.invoke( [ + "--tb", "apply", model_path_batch, data_path_batch, @@ -82,8 +86,12 @@ def test_apply_batch(runner, model_path_batch, data_path_batch): ], ) assert result.exit_code == 0, (result.output, result.exception) - predictions = load(path) - assert isinstance(predictions, ndarray) + predictions_meta = load_meta( + path, load_value=True, force_type=MlemDataset + ) + assert isinstance(predictions_meta.dataset, ListDatasetType) + predictions = 
predictions_meta.get_value() + assert isinstance(predictions, list) def test_apply_with_import(runner, model_meta_saved_single, tmp_path_factory): @@ -94,6 +102,7 @@ def test_apply_with_import(runner, model_meta_saved_single, tmp_path_factory): path = posixpath.join(dir, "data") result = runner.invoke( [ + "--tb", "apply", model_meta_saved_single.loc.uri, data_path, diff --git a/tests/cli/test_deploy.py b/tests/cli/test_deploy.py index 1959fc75..8ed0874c 100644 --- a/tests/cli/test_deploy.py +++ b/tests/cli/test_deploy.py @@ -14,7 +14,7 @@ MlemEnv, MlemLink, ) -from mlem.runtime.client.base import BaseClient, HTTPClient +from mlem.runtime.client import Client, HTTPClient from tests.cli.conftest import Runner @@ -27,7 +27,7 @@ def mock_deploy_get_client(mocker, request_get_mock, request_post_mock): class DeployStateMock(DeployState): - def get_client(self) -> BaseClient: + def get_client(self) -> Client: pass diff --git a/tests/cli/test_pack.py b/tests/cli/test_pack.py index 0bd2f552..7d69ea00 100644 --- a/tests/cli/test_pack.py +++ b/tests/cli/test_pack.py @@ -1,13 +1,12 @@ import os.path from typing import ClassVar -from mlem.core.objects import MlemModel -from mlem.pack import Packager +from mlem.core.objects import MlemModel, MlemPackager from mlem.utils.path import make_posix from tests.cli.conftest import Runner -class PackagerMock(Packager): +class PackagerMock(MlemPackager): type: ClassVar = "mock" target: str diff --git a/tests/cli/test_serve.py b/tests/cli/test_serve.py index 0953795c..695c5a56 100644 --- a/tests/cli/test_serve.py +++ b/tests/cli/test_serve.py @@ -1,7 +1,7 @@ from typing import ClassVar from mlem.runtime import Interface -from mlem.runtime.server.base import Server +from mlem.runtime.server import Server from mlem.ui import echo from tests.cli.conftest import Runner diff --git a/tests/conftest.py b/tests/conftest.py index 748b95f8..395f856c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -26,7 +26,7 @@ from mlem.core.model import 
Argument, ModelType, Signature from mlem.core.objects import MlemDataset, MlemModel from mlem.core.requirements import Requirements -from mlem.runtime.interface.base import ModelInterface +from mlem.runtime.interface import ModelInterface from mlem.utils.github import ls_github_branches RESOURCES = "resources" @@ -161,7 +161,7 @@ def patched_get(url, params=None, **kwargs): return client.get(url, params=params, **kwargs) return mocker.patch( - "mlem.runtime.client.base.requests.get", + "mlem.runtime.client.requests.get", side_effect=patched_get, ) @@ -178,7 +178,7 @@ def patched_post(url, data=None, json=None, **kwargs): return client.post(url, data=data, json=json, **kwargs) return mocker.patch( - "mlem.runtime.client.base.requests.post", + "mlem.runtime.client.requests.post", side_effect=patched_post, ) diff --git a/tests/contrib/test_fastapi.py b/tests/contrib/test_fastapi.py index 5c111ffe..bd62305c 100644 --- a/tests/contrib/test_fastapi.py +++ b/tests/contrib/test_fastapi.py @@ -11,7 +11,7 @@ from mlem.core.dataset_type import DatasetAnalyzer from mlem.core.model import Argument, Signature from mlem.core.objects import MlemModel -from mlem.runtime.interface.base import ModelInterface +from mlem.runtime.interface import ModelInterface @pytest.fixture diff --git a/tests/core/test_base.py b/tests/core/test_base.py index d64c07d6..8a08a04b 100644 --- a/tests/core/test_base.py +++ b/tests/core/test_base.py @@ -3,15 +3,14 @@ from mlem.contrib.docker import DockerImagePackager from mlem.contrib.fastapi import FastAPIServer from mlem.core.base import MlemABC, build_mlem_object, parse_links, smart_split -from mlem.core.objects import MlemLink, MlemModel, MlemObject -from mlem.pack import Packager -from mlem.runtime.server.base import Server +from mlem.core.objects import MlemLink, MlemModel, MlemObject, MlemPackager +from mlem.runtime.server import Server from tests.conftest import resource_path def test_build_model(): res = build_mlem_object( - Packager, + 
MlemPackager, "docker", ["image.name=kek"], [f"server={resource_path(__file__, 'server.yaml')}"], diff --git a/tests/core/test_dataset_type.py b/tests/core/test_dataset_type.py index d2119c5d..b1ac6fdb 100644 --- a/tests/core/test_dataset_type.py +++ b/tests/core/test_dataset_type.py @@ -3,6 +3,7 @@ from mlem.core.dataset_type import ( DatasetAnalyzer, + DatasetReader, DatasetType, DictDatasetType, DictReader, @@ -11,6 +12,7 @@ PrimitiveReader, PrimitiveType, TupleDatasetType, + TupleLikeListDatasetType, _TupleLikeDatasetReader, _TupleLikeDatasetWriter, ) @@ -92,14 +94,12 @@ def test_list_source(): reader_type=ListReader, ) - assert list(artifacts.keys()) == list( - map(lambda x: str(x) + "/data", range(len(l_value))) - ) - assert artifacts["0"]["data"].uri.endswith("data/0") - assert artifacts["1"]["data"].uri.endswith("data/1") - assert artifacts["2"]["data"].uri.endswith("data/2") - assert artifacts["3"]["data"].uri.endswith("data/3") - assert artifacts["4"]["data"].uri.endswith("data/4") + assert list(artifacts.keys()) == [f"{x}/data" for x in range(len(l_value))] + assert artifacts["0/data"].uri.endswith("data/0") + assert artifacts["1/data"].uri.endswith("data/1") + assert artifacts["2/data"].uri.endswith("data/2") + assert artifacts["3/data"].uri.endswith("data/3") + assert artifacts["4/data"].uri.endswith("data/4") def test_tuple(): @@ -152,16 +152,22 @@ def test_tuple_source(): "4/data", "5/data", ] - assert list(artifacts["1"].keys()) == list( - map(lambda x: str(x) + "/data", range(len(t_value[1]))) - ) - assert artifacts["0"]["data"].uri.endswith("data/0") - assert artifacts["1"]["0"]["data"].uri.endswith("data/1/0") - assert artifacts["1"]["1"]["data"].uri.endswith("data/1/1") - assert artifacts["2"]["data"].uri.endswith("data/2") - assert artifacts["3"]["data"].uri.endswith("data/3") - assert artifacts["4"]["data"].uri.endswith("data/4") - assert artifacts["5"]["data"].uri.endswith("data/5") + assert artifacts["0/data"].uri.endswith("data/0") + assert 
artifacts["1/0/data"].uri.endswith("data/1/0") + assert artifacts["1/1/data"].uri.endswith("data/1/1") + assert artifacts["2/data"].uri.endswith("data/2") + assert artifacts["3/data"].uri.endswith("data/3") + assert artifacts["4/data"].uri.endswith("data/4") + assert artifacts["5/data"].uri.endswith("data/5") + + +def test_tuple_reader(): + dataset_type = TupleLikeListDatasetType(items=[]) + assert dataset_type.dict()["type"] == "tuple_like_list" + reader = _TupleLikeDatasetReader(dataset_type=dataset_type, readers=[]) + new_reader = parse_obj_as(DatasetReader, reader.dict()) + res = new_reader.read({}) + assert res.data == [] def test_mixed_list_source(): @@ -183,16 +189,13 @@ def test_mixed_list_source(): "4/data", "5/data", ] - assert list(artifacts["1"].keys()) == list( - map(lambda x: str(x) + "/data", range(len(t_value[1]))) - ) - assert artifacts["0"]["data"].uri.endswith("data/0") - assert artifacts["1"]["0"]["data"].uri.endswith("data/1/0") - assert artifacts["1"]["1"]["data"].uri.endswith("data/1/1") - assert artifacts["2"]["data"].uri.endswith("data/2") - assert artifacts["3"]["data"].uri.endswith("data/3") - assert artifacts["4"]["data"].uri.endswith("data/4") - assert artifacts["5"]["data"].uri.endswith("data/5") + assert artifacts["0/data"].uri.endswith("data/0") + assert artifacts["1/0/data"].uri.endswith("data/1/0") + assert artifacts["1/1/data"].uri.endswith("data/1/1") + assert artifacts["2/data"].uri.endswith("data/2") + assert artifacts["3/data"].uri.endswith("data/3") + assert artifacts["4/data"].uri.endswith("data/4") + assert artifacts["5/data"].uri.endswith("data/5") def test_dict(): @@ -240,7 +243,6 @@ def custom_assert(x, y): ) assert list(artifacts.keys()) == ["1/data", "2/data", "3/1/data"] - assert list(artifacts["3"].keys()) == ["1/data"] - assert artifacts["1"]["data"].uri.endswith("data/1") - assert artifacts["2"]["data"].uri.endswith("data/2") - assert artifacts["3"]["1"]["data"].uri.endswith("data/3/1") + assert 
artifacts["1/data"].uri.endswith("data/1") + assert artifacts["2/data"].uri.endswith("data/2") + assert artifacts["3/1/data"].uri.endswith("data/3/1") diff --git a/tests/runtime/test_client.py b/tests/runtime/test_client.py index 84428dd5..8dc9893d 100644 --- a/tests/runtime/test_client.py +++ b/tests/runtime/test_client.py @@ -8,7 +8,7 @@ from mlem.core.dataset_type import DatasetAnalyzer from mlem.core.errors import WrongMethodError from mlem.core.model import Argument, Signature -from mlem.runtime.client.base import HTTPClient +from mlem.runtime.client import HTTPClient @pytest.fixture diff --git a/tests/runtime/test_interface.py b/tests/runtime/test_interface.py index edbf77a7..69c1e3f9 100644 --- a/tests/runtime/test_interface.py +++ b/tests/runtime/test_interface.py @@ -7,7 +7,7 @@ from mlem.core.model import Argument, Signature from mlem.core.requirements import Requirements from mlem.runtime import Interface -from mlem.runtime.interface.base import SimpleInterface, expose +from mlem.runtime.interface import SimpleInterface, expose class Container(DatasetType): diff --git a/tests/runtime/test_model_interface.py b/tests/runtime/test_model_interface.py index 86c5502a..b2c5a3f4 100644 --- a/tests/runtime/test_model_interface.py +++ b/tests/runtime/test_model_interface.py @@ -5,7 +5,7 @@ from mlem.contrib.sklearn import SklearnModel from mlem.core.objects import MlemModel from mlem.core.requirements import Requirements -from mlem.runtime.interface.base import ModelInterface +from mlem.runtime.interface import ModelInterface class PandasModel: diff --git a/tests/utils/test_module_tools.py b/tests/utils/test_module_tools.py index 9f16ed1a..8373c0df 100644 --- a/tests/utils/test_module_tools.py +++ b/tests/utils/test_module_tools.py @@ -1,13 +1,11 @@ from typing import ClassVar import numpy -import pydantic import pytest from pydantic import BaseModel from mlem.utils.importing import import_from_path, import_module from mlem.utils.module import ( - 
analyze_module_imports, check_pypi_module, get_module_repr, get_module_version, @@ -28,15 +26,6 @@ class Obj: pass -def test_analyze_module_imports(): - reqs = analyze_module_imports("tests.utils.test_module_tools") - assert reqs == { - get_module_repr(pytest), - get_module_repr(numpy), - get_module_repr(pydantic), - } - - @pytest.fixture() def external_local_module(tmp_path_factory): path = tmp_path_factory.mktemp("external") / "external.py"