feat: add model_source_info to Model in aiplatform v1beta1 model.proto #1691

Merged: 11 commits (Sep 29, 2022)
2 changes: 2 additions & 0 deletions google/cloud/aiplatform_v1/__init__.py
@@ -333,6 +333,7 @@
from .types.migration_service import SearchMigratableResourcesResponse
from .types.model import Model
from .types.model import ModelContainerSpec
from .types.model import ModelSourceInfo
from .types.model import Port
from .types.model import PredictSchemata
from .types.model_deployment_monitoring_job import (
@@ -857,6 +858,7 @@
"ModelMonitoringObjectiveConfig",
"ModelMonitoringStatsAnomalies",
"ModelServiceClient",
"ModelSourceInfo",
"MutateDeployedIndexOperationMetadata",
"MutateDeployedIndexRequest",
"MutateDeployedIndexResponse",
2 changes: 2 additions & 0 deletions google/cloud/aiplatform_v1/types/__init__.py
@@ -384,6 +384,7 @@
from .model import (
Model,
ModelContainerSpec,
ModelSourceInfo,
Port,
PredictSchemata,
)
@@ -885,6 +886,7 @@
"SearchMigratableResourcesResponse",
"Model",
"ModelContainerSpec",
"ModelSourceInfo",
"Port",
"PredictSchemata",
"ModelDeploymentMonitoringBigQueryTable",
33 changes: 33 additions & 0 deletions google/cloud/aiplatform_v1/types/model.py
@@ -30,6 +30,7 @@
"PredictSchemata",
"ModelContainerSpec",
"Port",
"ModelSourceInfo",
},
)

@@ -274,6 +275,11 @@ class Model(proto.Message):
Customer-managed encryption key spec for a
Model. If set, this Model and all sub-resources
of this Model will be secured by this key.
model_source_info (google.cloud.aiplatform_v1.types.ModelSourceInfo):
Output only. Source of a model. It can either
be automl training pipeline, custom training
pipeline, BigQuery ML, or existing Vertex AI
Model.
"""

class DeploymentResourcesType(proto.Enum):
@@ -443,6 +449,11 @@ class ExportableContent(proto.Enum):
number=24,
message=gca_encryption_spec.EncryptionSpec,
)
model_source_info = proto.Field(
proto.MESSAGE,
number=38,
message="ModelSourceInfo",
)


class PredictSchemata(proto.Message):
@@ -800,4 +811,26 @@ class Port(proto.Message):
)


class ModelSourceInfo(proto.Message):
r"""Detail description of the source information of the model.

Attributes:
source_type (google.cloud.aiplatform_v1.types.ModelSourceInfo.ModelSourceType):
Type of the model source.
"""

class ModelSourceType(proto.Enum):
r"""Source of the model."""
MODEL_SOURCE_TYPE_UNSPECIFIED = 0
AUTOML = 1
CUSTOM = 2
BQML = 3

source_type = proto.Field(
proto.ENUM,
number=1,
enum=ModelSourceType,
)


__all__ = tuple(sorted(__protobuf__.manifest))
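
For context, model_source_info is an output-only field, so client code only reads it after fetching a Model. A minimal sketch of how the new surface might be used, assuming the v1 exports added above and using placeholder project, location, and model IDs:

from google.cloud import aiplatform_v1

# Placeholder resource name; substitute a real project, location, and model ID.
model_name = "projects/my-project/locations/us-central1/models/1234567890"

client = aiplatform_v1.ModelServiceClient()
model = client.get_model(name=model_name)

# model_source_info is output only and reports where the Model came from.
source_type = model.model_source_info.source_type
if source_type == aiplatform_v1.ModelSourceInfo.ModelSourceType.BQML:
    print("This Model was registered from BigQuery ML.")
elif source_type == aiplatform_v1.ModelSourceInfo.ModelSourceType.AUTOML:
    print("This Model came from an AutoML training pipeline.")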
@@ -218,40 +218,40 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
@staticmethod
def dataset_path(
project: str,
location: str,
dataset: str,
) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
return "projects/{project}/datasets/{dataset}".format(
project=project,
location=location,
dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
path,
)
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
return m.groupdict() if m else {}

@staticmethod
def dataset_path(
project: str,
location: str,
dataset: str,
) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/datasets/{dataset}".format(
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
project=project,
location=location,
dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
path,
)
return m.groupdict() if m else {}

@staticmethod
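For reference, the two overloads above (the client file name is not captured here, but the surrounding tests point to the v1beta1 migration service client) build and parse dataset resource names. Because both staticmethods share the same name, the definition that appears last wins at import time, which after this change is the location-qualified variant. A minimal round-trip sketch with placeholder identifiers:

from google.cloud.aiplatform_v1beta1.services.migration_service import (
    MigrationServiceClient,
)

# Placeholder identifiers for illustration only.
path = MigrationServiceClient.dataset_path("my-project", "us-central1", "my-dataset")
# -> "projects/my-project/locations/us-central1/datasets/my-dataset"

segments = MigrationServiceClient.parse_dataset_path(path)
assert segments == {
    "project": "my-project",
    "location": "us-central1",
    "dataset": "my-dataset",
}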
26 changes: 13 additions & 13 deletions tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py
@@ -2032,22 +2032,19 @@ def test_parse_dataset_path():

def test_dataset_path():
project = "squid"
location = "clam"
dataset = "whelk"
expected = "projects/{project}/locations/{location}/datasets/{dataset}".format(
dataset = "clam"
expected = "projects/{project}/datasets/{dataset}".format(
project=project,
location=location,
dataset=dataset,
)
actual = MigrationServiceClient.dataset_path(project, location, dataset)
actual = MigrationServiceClient.dataset_path(project, dataset)
assert expected == actual


def test_parse_dataset_path():
expected = {
"project": "octopus",
"location": "oyster",
"dataset": "nudibranch",
"project": "whelk",
"dataset": "octopus",
}
path = MigrationServiceClient.dataset_path(**expected)

@@ -2057,19 +2054,22 @@ def test_parse_dataset_path():


def test_dataset_path():
project = "cuttlefish"
dataset = "mussel"
expected = "projects/{project}/datasets/{dataset}".format(
project = "oyster"
location = "nudibranch"
dataset = "cuttlefish"
expected = "projects/{project}/locations/{location}/datasets/{dataset}".format(
project=project,
location=location,
dataset=dataset,
)
actual = MigrationServiceClient.dataset_path(project, dataset)
actual = MigrationServiceClient.dataset_path(project, location, dataset)
assert expected == actual


def test_parse_dataset_path():
expected = {
"project": "winkle",
"project": "mussel",
"location": "winkle",
"dataset": "nautilus",
}
path = MigrationServiceClient.dataset_path(**expected)