From 5d109a77ef561ce15c7e1fd0211c3653943c2c1e Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Tue, 14 Jun 2022 14:52:50 +0000 Subject: [PATCH 01/48] rebase to master after ga merge --- .../cloud/aiplatform/metadata/types/base.py | 123 +++++ .../aiplatform/metadata/types/google_types.py | 224 +++++++++ .../aiplatform/metadata/types/system_types.py | 187 ++++++++ .../cloud/aiplatform/metadata/types/utils.py | 98 ++++ .../aiplatform/test_metadata_schema_types.py | 426 ++++++++++++++++++ 5 files changed, 1058 insertions(+) create mode 100644 google/cloud/aiplatform/metadata/types/base.py create mode 100644 google/cloud/aiplatform/metadata/types/google_types.py create mode 100644 google/cloud/aiplatform/metadata/types/system_types.py create mode 100644 google/cloud/aiplatform/metadata/types/utils.py create mode 100644 tests/unit/aiplatform/test_metadata_schema_types.py diff --git a/google/cloud/aiplatform/metadata/types/base.py b/google/cloud/aiplatform/metadata/types/base.py new file mode 100644 index 0000000000..475570654b --- /dev/null +++ b/google/cloud/aiplatform/metadata/types/base.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth import credentials as auth_credentials +from typing import Optional, Dict +from google.cloud.aiplatform.metadata import artifact +from google.cloud.aiplatform import base + +from google.cloud.aiplatform.metadata import constants + +_LOGGER = base.Logger(__name__) + + +class BaseArtifactSchema(object): + """Base class for Metadata Artifact types. + + This is the base class for defining various artifact types, which can be + passed to google.Artifact to create a corresponding resource. + Artifacts carry a `metadata` field, which is a dictionary for storing + metadata related to this artifact. Subclasses from ArtifactType can enforce + various structure and field requirements for the metadata field. + + Args: + schema_title (str): + Optional. The schema title used by the Artifact, defaults to "system.Artifact" + resource_name (str): + Optional. The resource name of the Artifact following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. + display_name (str): + Optional. The user-defined name of the Artifact. + schema_version (str): + Optional. schema_version specifies the version used by the Artifact. + If not set, defaults to use the latest version. + description (str): + Optional. Describes the purpose of the Artifact to be created. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Artifact. + **kwargs: + Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. 
+ """ + + ARTIFACT_PROPERTY_KEY_RESOURCE_NAME = "resourceName" + SCHEMA_TITLE = "system.Artifact" + + def __init__( + self, + schema_title: Optional[str] = None, + resource_name: Optional[str] = None, + uri: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + description: Optional[str] = None, + metadata: Optional[Dict] = None, + **kwargs, + ): + + """Initializes the Artifact with the given name, URI and metadata.""" + self.schema_title = BaseArtifactSchema.SCHEMA_TITLE + if schema_title: + self.schema_title = schema_title + self.resource_name = resource_name + + self.resource_id = None + if resource_name: + # Temporary work around while Artifact.create takes resource_id instead of resource_name + self.resource_id = resource_name.split("/")[-1] + + self.uri = uri + self.display_name = display_name + self.schema_version = schema_version or constants._DEFAULT_SCHEMA_VERSION + self.description = description + self.metadata = metadata + + def create( + self, + metadata_store_id: Optional[str] = "default", + project: Optional[str] = None, + location: Optional[str] = None, + credentials: Optional[auth_credentials.Credentials] = None, + ): + """Creates a new Metadata Artifact. + + Args: + metadata_store_id (str): + Optional. The portion of the resource name with + the format: + projects/123/locations/us-central1/metadataStores//artifacts/ + If not provided, the MetadataStore's ID will be set to "default". + project (str): + Optional. Project used to create this Artifact. Overrides project set in + aiplatform.init. + location (str): + Optional. Location used to create this Artifact. Overrides location set in + aiplatform.init. + credentials (auth_credentials.Credentials): + Optional. Custom credentials used to create this Artifact. Overrides + credentials set in aiplatform.init. + Returns: + Artifact: Instantiated representation of the managed Metadata Artifact. + """ + self.artifact = artifact.Artifact.create( + base_artifact=self, + metadata_store_id=metadata_store_id, + project=project, + location=location, + credentials=credentials, + ) + return self.artifact diff --git a/google/cloud/aiplatform/metadata/types/google_types.py b/google/cloud/aiplatform/metadata/types/google_types.py new file mode 100644 index 0000000000..135b7e43a7 --- /dev/null +++ b/google/cloud/aiplatform/metadata/types/google_types.py @@ -0,0 +1,224 @@ +# -*- coding: utf-8 -*- + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Optional, Dict, NamedTuple, List +from dataclasses import dataclass +from google.cloud.aiplatform.metadata.types import base +from google.cloud.aiplatform.metadata.types import utils + + +class VertexDataset(base.BaseArtifactSchema): + """An artifact representing a Vertex Dataset.""" + + SCHEMA_TITLE = "google.VertexDataset" + + def __init__( + self, + dataset_name: Optional[str] = None, + uri: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + description: Optional[str] = None, + metadata: Optional[Dict] = None, + **kwargs, + ): + """Args: + dataset_name (str): + Optional. The name of the Dataset resource, in a form of + projects/{project}/locations/{location}/datasets/{datasets_name}. For + more details, see + https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.datasets/get + uri (str): + Optional. The URI for the assets of this Artifact. + display_name (str): + Optional. The user-defined name of the Artifact. + schema_version (str): + Optional. schema_version specifies the version used by the Artifact. + If not set, defaults to use the latest version. + description (str): + Optional. Describes the purpose of the Artifact to be created. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the base. + **kwargs: + Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. + """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = dataset_name + super(VertexDataset, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=dataset_name, + uri=uri, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + ) + + +class VertexModel(base.BaseArtifactSchema): + """An artifact representing a Vertex Model.""" + + SCHEMA_TITLE = "google.VertexModel" + + def __init__( + self, + vertex_model_name: Optional[str] = None, + uri: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + description: Optional[str] = None, + metadata: Optional[Dict] = None, + **kwargs, + ): + """Args: + vertex_model_name (str): + Optional. The name of the VertexModel resource, in a form of + projects/{project}/locations/{location}/models/{model}. For + more details, see + https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.models/get + uri (str): + Optional. The URI for the assets of this Artifact. + display_name (str): + Optional. The user-defined name of the Artifact. + schema_version (str): + Optional. schema_version specifies the version used by the Artifact. + If not set, defaults to use the latest version. + description (str): + Optional. Describes the purpose of the Artifact to be created. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the base. + **kwargs: + Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. 
+ """ + + extended_metadata = metadata or {} + extended_metadata["resourceName"] = vertex_model_name + + super(VertexModel, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=vertex_model_name, + uri=uri, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + ) + + +class VertexEndpoint(base.BaseArtifactSchema): + """An artifact representing a Vertex Endpoint.""" + + SCHEMA_TITLE = "google.VertexEndpoint" + + def __init__( + self, + vertex_endpoint_name: Optional[str] = None, + uri: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + description: Optional[str] = None, + metadata: Optional[Dict] = None, + **kwargs, + ): + """Args: + vertex_endpoint_name (str): + Optional. The name of the VertexEndpoint resource, in a form of + projects/{project}/locations/{location}/endpoints/{endpoint}. For + more details, see + https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.endpoints/get + uri (str): + Optional. The URI for the assets of this Artifact. + display_name (str): + Optional. The user-defined name of the Artifact. + schema_version (str): + Optional. schema_version specifies the version used by the Artifact. + If not set, defaults to use the latest version. + description (str): + Optional. Describes the purpose of the Artifact to be created. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the base. + **kwargs: + Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. + """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = vertex_endpoint_name + + super(VertexEndpoint, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=vertex_endpoint_name, + uri=uri, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + ) + + +class UnmanagedContainerModel(base.BaseArtifactSchema): + """An artifact representing a Vertex Unmanaged Container Model.""" + + SCHEMA_TITLE = "google.UnmanagedContainerModel" + + def __init__( + self, + predict_schema_ta: utils.PredictSchemata, + container_spec: utils.PredictSchemata, + unmanaged_container_model_name: Optional[str] = None, + uri: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + description: Optional[str] = None, + metadata: Optional[Dict] = None, + **kwargs, + ): + """Args: + predict_schema_ta (PredictSchemata): + An instance of PredictSchemata which holds instance, parameter and prediction schema uris. + container_spec (ContainerSpec): + An instance of ContainerSpec which holds the container configuration for the model. + unmanaged_container_model_name (str): + Optional. The resource name of the Artifact following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. + uri (str): + Optional. The URI for the assets of this Artifact. + display_name (str): + Optional. The user-defined name of the Artifact. + schema_version (str): + Optional. schema_version specifies the version used by the Artifact. + If not set, defaults to use the latest version. + description (str): + Optional. Describes the purpose of the Artifact to be created. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the base. + **kwargs: + Optional. 
Additional Args that will be passed directly to the Artifact base method for backward compatibility. + """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = unmanaged_container_model_name + extended_metadata["predictSchemata"] = predict_schema_ta.to_dict() + extended_metadata["containerSpec"] = container_spec.to_dict() + + super(UnmanagedContainerModel, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=unmanaged_container_model_name, + uri=uri, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + ) + \ No newline at end of file diff --git a/google/cloud/aiplatform/metadata/types/system_types.py b/google/cloud/aiplatform/metadata/types/system_types.py new file mode 100644 index 0000000000..8db4fcfebe --- /dev/null +++ b/google/cloud/aiplatform/metadata/types/system_types.py @@ -0,0 +1,187 @@ +# -*- coding: utf-8 -*- + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Optional, Dict, List +from google.cloud.aiplatform.metadata.types import base +from google.cloud.aiplatform.metadata.types import utils +from itertools import zip_longest + + +class Model(base.BaseArtifactSchema): + """Schemaless Artifact Type to store Markdown file.""" + + SCHEMA_TITLE = "system.Model" + + def __init__( + self, + model_name: Optional[str] = None, + uri: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + description: Optional[str] = None, + metadata: Optional[Dict] = None, + **kwargs, + ): + """Args: + model_name (str): + Optional. The resource name of the Artifact following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. + uri (str): + Optional. The URI for the assets of this base. + display_name (str): + Optional. The user-defined name of the base. + schema_version (str): + Optional. schema_version specifies the version used by the base. + If not set, defaults to use the latest version. + description (str): + Optional. Describes the purpose of the Artifact to be created. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the base. + **kwargs: + Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. 
+ """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = model_name + super(Model, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=model_name, + uri=uri, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + ) + + +class Dataset(base.BaseArtifactSchema): + """An artifact representing a system Dataset.""" + + SCHEMA_TITLE = "system.Dataset" + + def __init__( + self, + dataset_name: Optional[str] = None, + uri: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + description: Optional[str] = None, + metadata: Optional[Dict] = None, + **kwargs, + ): + """Args: + dataset_name (str): + Optional. The resource name of the Artifact following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. + uri (str): + Optional. The URI for the assets of this base. + display_name (str): + Optional. The user-defined name of the base. + schema_version (str): + Optional. schema_version specifies the version used by the base. + If not set, defaults to use the latest version. + description (str): + Optional. Describes the purpose of the Artifact to be created. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the base. + **kwargs: + Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. + """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = dataset_name + super(Dataset, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=dataset_name, + uri=uri, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + ) + + +class Metrics(base.BaseArtifactSchema): + """Artifact type for scalar metrics.""" + + SCHEMA_TITLE = "system.Metrics" + + def __init__( + self, + metrics_name: Optional[str] = None, + accuracy: Optional[float] = 0, + precision: Optional[float] = 0, + recall: Optional[float] = 0, + f1score: Optional[float] = 0, + mean_absolute_error: Optional[float] = 0, + mean_squared_error: Optional[float] = 0, + uri: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + description: Optional[str] = None, + metadata: Optional[Dict] = None, + **kwargs, + ): + """Args: + metrics_name (str): + Optional. The resource name of the Artifact following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. + accuracy (float): + Optional. Defaults to zero. + precision (float): + Optional. Defaults to zero. + recall (float): + Optional. Defaults to zero. + f1score (float): + Optional. Defaults to zero. + mean_absolute_error (float): + Optional. Defaults to zero. + mean_squared_error (float): + Optional. Defaults to zero. + uri (str): + Optional. The URI for the assets of this base. + display_name (str): + Optional. The user-defined name of the base. + schema_version (str): + Optional. schema_version specifies the version used by the base. + If not set, defaults to use the latest version. + description (str): + Optional. Describes the purpose of the Artifact to be created. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the base. + **kwargs: + Optional. 
Additional Args that will be passed directly to the Artifact base method for backward compatibility. + """ + extended_metadata = metadata or {} + extended_metadata["accuracy"] = accuracy + extended_metadata["precision"] = precision + extended_metadata["recall"] = recall + extended_metadata["f1score"] = f1score + extended_metadata["mean_absolute_error"] = mean_absolute_error + extended_metadata["mean_squared_error"] = mean_squared_error + extended_metadata["resourceName"] = metrics_name + + super(Metrics, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=metrics_name, + uri=uri, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + ) + \ No newline at end of file diff --git a/google/cloud/aiplatform/metadata/types/utils.py b/google/cloud/aiplatform/metadata/types/utils.py new file mode 100644 index 0000000000..61f9064690 --- /dev/null +++ b/google/cloud/aiplatform/metadata/types/utils.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import namedtuple +from typing import Optional, Dict, NamedTuple, List +from dataclasses import dataclass +from google.cloud.aiplatform.metadata import artifact +from itertools import zip_longest + + +@dataclass +class PredictSchemata: + """A class holding instance, parameter and prediction schema uris. + + Args: + instance_schema_uri (str): + Required. Points to a YAML file stored on Google Cloud Storage describing the format of a single instance, which are used in PredictRequest.instances, ExplainRequest.instances and BatchPredictionJob.input_config. The schema is defined as an OpenAPI 3.0.2 `Schema Object. + parameters_schema_uri (str): + Required. Points to a YAML file stored on Google Cloud Storage describing the parameters of prediction and explanation via PredictRequest.parameters, ExplainRequest.parameters and BatchPredictionJob.model_parameters. The schema is defined as an OpenAPI 3.0.2 `Schema Object. + prediction_schema_uri (str): + Required. Points to a YAML file stored on Google Cloud Storage describing the format of a single prediction produced by this Model, which are returned via PredictResponse.predictions, ExplainResponse.explanations, and BatchPredictionJob.output_config. The schema is defined as an OpenAPI 3.0.2 `Schema Object. + """ + + instance_schema_uri: str + parameters_schema_uri: str + prediction_schema_uri: str + + def to_dict(self): + """ML metadata schema dictionary representation of this DataClass""" + results = {} + results["instanceSchemaUri"] = self.instance_schema_uri + results["parametersSchemaUri"] = self.parameters_schema_uri + results["predictionSchemaUri"] = self.prediction_schema_uri + + return results + + +@dataclass +class ContainerSpec: + """Container configuration for the model. + Args: + image_uri (str): + Required. URI of the Docker image to be used as the custom container for serving predictions. 
This URI must identify an image in Artifact Registry or Container Registry. Learn more about the `container publishing requirements + command (Sequence[str]): + Optional. Specifies the command that runs when the container starts. This overrides the container's `ENTRYPOINT + args (Sequence[str]): + Optional. Specifies arguments for the command that runs when the container starts. This overrides the container's ```CMD`` + env (Sequence[google.cloud.aiplatform_v1.types.EnvVar]): + Optional. List of environment variables to set in the container. After the container starts running, code running in the container can read these environment variables. Additionally, the command and args fields can reference these variables. Later entries in this list can also reference earlier entries. For example, the following example sets the variable ``VAR_2`` to have the value ``foo bar``: .. code:: json [ { "name": "VAR_1", "value": "foo" }, { "name": "VAR_2", "value": "$(VAR_1) bar" } ] If you switch the order of the variables in the example, then the expansion does not occur. This field corresponds to the ``env`` field of the Kubernetes Containers `v1 core API. + ports (Sequence[google.cloud.aiplatform_v1.types.Port]): + Optional. List of ports to expose from the container. Vertex AI sends any prediction requests that it receives to the first port on this list. Vertex AI also sends `liveness and health checks. + predict_route (str): + Optional. HTTP path on the container to send prediction requests to. Vertex AI forwards requests sent using projects.locations.endpoints.predict to this path on the container's IP address and port. Vertex AI then returns the container's response in the API response. For example, if you set this field to ``/foo``, then when Vertex AI receives a prediction request, it forwards the request body in a POST request to the ``/foo`` path on the port of your container specified by the first value of this ``ModelContainerSpec``'s ports field. If you don't specify this field, it defaults to the following value when you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1.EndpointService.DeployModel]: /v1/endpoints/ENDPOINT/deployedModels/DEPLOYED_MODEL:predict The placeholders in this value are replaced as follows: - ENDPOINT: The last segment (following ``endpoints/``)of the Endpoint.name][] field of the Endpoint where this Model has been deployed. (Vertex AI makes this value available to your container code as the ```AIP_ENDPOINT_ID`` environment variable + health_route (str): + Optional. HTTP path on the container to send health checks to. Vertex AI intermittently sends GET requests to this path on the container's IP address and port to check that the container is healthy. 
Read more about `health checks + display_name (str): + """ + + image_uri: str + command: Optional[List[str]] = None + args: Optional[List[str]] = None + env: Optional[List[Dict[str, str]]] = None + ports: Optional[List[int]] = None + predict_route: Optional[str] = None + health_route: Optional[str] = None + + def to_dict(self): + """ML metadata schema dictionary representation of this DataClass""" + results = {} + results["imageUri"] = self.image_uri + if self.command: + results["command"] = self.command + if self.args: + results["args"] = self.args + if self.env: + results["env"] = self.env + if self.ports: + results["ports"] = self.ports + if self.predict_route: + results["predictRoute"] = self.predict_route + if self.health_route: + results["healthRoute"] = self.health_route + + return results + \ No newline at end of file diff --git a/tests/unit/aiplatform/test_metadata_schema_types.py b/tests/unit/aiplatform/test_metadata_schema_types.py new file mode 100644 index 0000000000..ab3dc97838 --- /dev/null +++ b/tests/unit/aiplatform/test_metadata_schema_types.py @@ -0,0 +1,426 @@ +# -*- coding: utf-8 -*- + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import json +from importlib import reload +from unittest import mock +from unittest.mock import patch, call + +import pytest +from google.cloud import aiplatform +from google.cloud.aiplatform import initializer +from google.cloud.aiplatform.metadata import metadata +from google.cloud.aiplatform.metadata.types import base +from google.cloud.aiplatform.metadata.types import google_types +from google.cloud.aiplatform.metadata.types import system_types +from google.cloud.aiplatform.metadata.types import utils + +from google.cloud.aiplatform_v1 import MetadataServiceClient +from google.cloud.aiplatform_v1 import Artifact as GapicArtifact + +# project +_TEST_PROJECT = "test-project" +_TEST_LOCATION = "us-central1" +_TEST_METADATA_STORE = "test-metadata-store" +_TEST_ALT_LOCATION = "europe-west4" +_TEST_PARENT = f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}/metadataStores/{_TEST_METADATA_STORE}" + +# resource attributes +_TEST_URI = "test-uri" +_TEST_DISPLAY_NAME = "test-display-name" +_TEST_SCHEMA_TITLE = "test.Example" +_TEST_SCHEMA_VERSION = "0.0.1" +_TEST_DESCRIPTION = "test description" +_TEST_METADATA = {"test-param1": 1, "test-param2": "test-value", "test-param3": True} +_TEST_UPDATED_METADATA = { + "test-param1": 2, + "test-param2": "test-value-1", + "test-param3": False, +} + +# context +_TEST_CONTEXT_ID = "test-context-id" +_TEST_CONTEXT_NAME = f"{_TEST_PARENT}/contexts/{_TEST_CONTEXT_ID}" + +# artifact +_TEST_ARTIFACT_ID = "test-artifact-id" +_TEST_ARTIFACT_NAME = f"{_TEST_PARENT}/artifacts/{_TEST_ARTIFACT_ID}" + + +@pytest.fixture +def create_artifact_mock(): + with patch.object(MetadataServiceClient, "create_artifact") as create_artifact_mock: + create_artifact_mock.return_value = GapicArtifact( + name=_TEST_ARTIFACT_NAME, + display_name=_TEST_DISPLAY_NAME, + schema_title=_TEST_SCHEMA_TITLE, + 
schema_version=_TEST_SCHEMA_VERSION, + description=_TEST_DESCRIPTION, + metadata=_TEST_METADATA, + state=GapicArtifact.State.STATE_UNSPECIFIED, + ) + yield create_artifact_mock + + +class TestMetadataBaseSchema: + def setup_method(self): + reload(initializer) + reload(metadata) + reload(aiplatform) + + def teardown_method(self): + initializer.global_pool.shutdown(wait=True) + + def test_base_class_overrides_default_schema_title(self): + artifact = base.BaseArtifactSchema(schema_title=_TEST_SCHEMA_TITLE) + assert artifact.schema_title == _TEST_SCHEMA_TITLE + + def test_base_class_overrides_resouce_id_from_resouce_name(self): + artifact = base.BaseArtifactSchema(resource_name=_TEST_ARTIFACT_NAME) + assert artifact.resource_id == _TEST_ARTIFACT_ID + + def test_base_class_overrides_default_version(self): + artifact = base.BaseArtifactSchema(schema_version=_TEST_SCHEMA_VERSION) + assert artifact.schema_version == _TEST_SCHEMA_VERSION + + def test_base_class_init_remaining_parameters_are_assigned_correctly(self): + artifact = base.BaseArtifactSchema( + schema_title=_TEST_SCHEMA_TITLE, + uri=_TEST_URI, + display_name=_TEST_DISPLAY_NAME, + description=_TEST_DESCRIPTION, + metadata=_TEST_UPDATED_METADATA, + ) + assert artifact.schema_title == _TEST_SCHEMA_TITLE + assert artifact.uri == _TEST_URI + assert artifact.display_name == _TEST_DISPLAY_NAME + assert artifact.description == _TEST_DESCRIPTION + assert artifact.metadata == _TEST_UPDATED_METADATA + + @pytest.mark.usefixtures("create_artifact_mock") + def test_create_is_called_with_default_parameters(self, create_artifact_mock): + aiplatform.init(project=_TEST_PROJECT) + base_artifact = base.BaseArtifactSchema( + schema_title=_TEST_SCHEMA_TITLE, + uri=_TEST_URI, + display_name=_TEST_DISPLAY_NAME, + description=_TEST_DESCRIPTION, + metadata=_TEST_UPDATED_METADATA, + ) + base_artifact.create(metadata_store_id=_TEST_METADATA_STORE) + create_artifact_mock.assert_called_once_with( + parent=_TEST_PARENT, artifact=mock.ANY, artifact_id=None + ) + + +class TestMetadataGoogleTypes: + def setup_method(self): + reload(initializer) + reload(metadata) + reload(aiplatform) + + def teardown_method(self): + initializer.global_pool.shutdown(wait=True) + + def test_vertex_dataset_schema_title_is_set_correctly(self): + artifact = google_types.VertexDataset() + assert artifact.schema_title == "google.VertexDataset" + + def test_vertex_dataset_resouce_name_is_set_in_metadata(self): + artifact = google_types.VertexDataset(dataset_name=_TEST_ARTIFACT_NAME) + assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME + + def test_vertex_dataset_constructor_parameters_are_set_correctly(self): + artifact = google_types.VertexDataset( + dataset_name=_TEST_ARTIFACT_NAME, + uri=_TEST_URI, + display_name=_TEST_DISPLAY_NAME, + schema_version=_TEST_SCHEMA_VERSION, + description=_TEST_DESCRIPTION, + metadata=_TEST_UPDATED_METADATA, + ) + assert artifact.uri == _TEST_URI + assert artifact.display_name == _TEST_DISPLAY_NAME + assert artifact.description == _TEST_DESCRIPTION + assert artifact.metadata == _TEST_UPDATED_METADATA + assert artifact.schema_version == _TEST_SCHEMA_VERSION + + def test_vertex_model_schema_title_is_set_correctly(self): + artifact = google_types.VertexModel() + assert artifact.schema_title == "google.VertexModel" + + def test_vertex_model_resouce_name_is_set_in_metadata(self): + artifact = google_types.VertexModel(vertex_model_name=_TEST_ARTIFACT_NAME) + assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME + + def 
test_vertex_model_constructor_parameters_are_set_correctly(self): + artifact = google_types.VertexModel( + vertex_model_name=_TEST_ARTIFACT_NAME, + uri=_TEST_URI, + display_name=_TEST_DISPLAY_NAME, + schema_version=_TEST_SCHEMA_VERSION, + description=_TEST_DESCRIPTION, + metadata=_TEST_UPDATED_METADATA, + ) + assert artifact.uri == _TEST_URI + assert artifact.display_name == _TEST_DISPLAY_NAME + assert artifact.description == _TEST_DESCRIPTION + assert artifact.metadata == _TEST_UPDATED_METADATA + assert artifact.schema_version == _TEST_SCHEMA_VERSION + + def test_vertex_endpoint_schema_title_is_set_correctly(self): + artifact = google_types.VertexEndpoint() + assert artifact.schema_title == "google.VertexEndpoint" + + def test_vertex_endpoint_resouce_name_is_set_in_metadata(self): + artifact = google_types.VertexEndpoint(vertex_endpoint_name=_TEST_ARTIFACT_NAME) + assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME + + def test_vertex_endpoint_constructor_parameters_are_set_correctly(self): + artifact = google_types.VertexEndpoint( + vertex_endpoint_name=_TEST_ARTIFACT_NAME, + uri=_TEST_URI, + display_name=_TEST_DISPLAY_NAME, + schema_version=_TEST_SCHEMA_VERSION, + description=_TEST_DESCRIPTION, + metadata=_TEST_UPDATED_METADATA, + ) + assert artifact.uri == _TEST_URI + assert artifact.display_name == _TEST_DISPLAY_NAME + assert artifact.description == _TEST_DESCRIPTION + assert artifact.metadata == _TEST_UPDATED_METADATA + assert artifact.schema_version == _TEST_SCHEMA_VERSION + + def test_unmanaged_container_model_title_is_set_correctly(self): + predict_schema_ta = utils.PredictSchemata( + instance_schema_uri="instance_uri", + prediction_schema_uri="prediction_uri", + parameters_schema_uri="parameters_uri", + ) + + container_spec = utils.ContainerSpec( + image_uri="gcr.io/test_container_image_uri" + ) + artifact = google_types.UnmanagedContainerModel( + predict_schema_ta=predict_schema_ta, + container_spec=container_spec, + ) + assert artifact.schema_title == "google.UnmanagedContainerModel" + + def test_unmanaged_container_model_resouce_name_is_set_in_metadata(self): + predict_schema_ta = utils.PredictSchemata( + instance_schema_uri="instance_uri", + prediction_schema_uri="prediction_uri", + parameters_schema_uri="parameters_uri", + ) + + container_spec = utils.ContainerSpec( + image_uri="gcr.io/test_container_image_uri" + ) + artifact = google_types.UnmanagedContainerModel( + predict_schema_ta=predict_schema_ta, + container_spec=container_spec, + unmanaged_container_model_name=_TEST_ARTIFACT_NAME, + ) + assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME + + def test_unmanaged_container_model_constructor_parameters_are_set_correctly(self): + predict_schema_ta = utils.PredictSchemata( + instance_schema_uri="instance_uri", + prediction_schema_uri="prediction_uri", + parameters_schema_uri="parameters_uri", + ) + + container_spec = utils.ContainerSpec( + image_uri="gcr.io/test_container_image_uri" + ) + + artifact = google_types.UnmanagedContainerModel( + predict_schema_ta=predict_schema_ta, + container_spec=container_spec, + unmanaged_container_model_name=_TEST_ARTIFACT_NAME, + uri=_TEST_URI, + display_name=_TEST_DISPLAY_NAME, + schema_version=_TEST_SCHEMA_VERSION, + description=_TEST_DESCRIPTION, + metadata=_TEST_UPDATED_METADATA, + ) + assert artifact.uri == _TEST_URI + assert artifact.display_name == _TEST_DISPLAY_NAME + assert artifact.description == _TEST_DESCRIPTION + assert artifact.metadata == _TEST_UPDATED_METADATA + assert artifact.schema_version == 
_TEST_SCHEMA_VERSION + + +class TestMetadataSystemTypes: + def setup_method(self): + reload(initializer) + reload(metadata) + reload(aiplatform) + + def teardown_method(self): + initializer.global_pool.shutdown(wait=True) + + def test_system_dataset_schema_title_is_set_correctly(self): + artifact = system_types.Dataset() + assert artifact.schema_title == "system.Dataset" + + def test_system_dataset_resouce_name_is_set_in_metadata(self): + artifact = system_types.Dataset(dataset_name=_TEST_ARTIFACT_NAME) + assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME + + def test_system_dataset_constructor_parameters_are_set_correctly(self): + artifact = system_types.Dataset( + dataset_name=_TEST_ARTIFACT_NAME, + uri=_TEST_URI, + display_name=_TEST_DISPLAY_NAME, + schema_version=_TEST_SCHEMA_VERSION, + description=_TEST_DESCRIPTION, + metadata=_TEST_UPDATED_METADATA, + ) + assert artifact.uri == _TEST_URI + assert artifact.display_name == _TEST_DISPLAY_NAME + assert artifact.description == _TEST_DESCRIPTION + assert artifact.metadata == _TEST_UPDATED_METADATA + assert artifact.schema_version == _TEST_SCHEMA_VERSION + + def test_system_model_schema_title_is_set_correctly(self): + artifact = system_types.Model() + assert artifact.schema_title == "system.Model" + + def test_system_model_resouce_name_is_set_in_metadata(self): + artifact = system_types.Model(model_name=_TEST_ARTIFACT_NAME) + assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME + + def test_system_model_constructor_parameters_are_set_correctly(self): + artifact = system_types.Model( + model_name=_TEST_ARTIFACT_NAME, + uri=_TEST_URI, + display_name=_TEST_DISPLAY_NAME, + schema_version=_TEST_SCHEMA_VERSION, + description=_TEST_DESCRIPTION, + metadata=_TEST_UPDATED_METADATA, + ) + assert artifact.uri == _TEST_URI + assert artifact.display_name == _TEST_DISPLAY_NAME + assert artifact.description == _TEST_DESCRIPTION + assert artifact.metadata == _TEST_UPDATED_METADATA + assert artifact.schema_version == _TEST_SCHEMA_VERSION + + def test_system_metrics_schema_title_is_set_correctly(self): + artifact = system_types.Metrics() + assert artifact.schema_title == "system.Metrics" + + def test_system_metrics_resouce_name_is_set_in_metadata(self): + artifact = system_types.Metrics(metrics_name=_TEST_ARTIFACT_NAME) + assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME + + def test_system_metrics_constructor_parameters_are_set_correctly(self): + artifact = system_types.Metrics( + metrics_name=_TEST_ARTIFACT_NAME, + accuracy=0.1, + precision=0.2, + recall=0.3, + f1score=0.4, + mean_absolute_error=0.5, + mean_squared_error=0.6, + uri=_TEST_URI, + display_name=_TEST_DISPLAY_NAME, + schema_version=_TEST_SCHEMA_VERSION, + description=_TEST_DESCRIPTION, + metadata=_TEST_UPDATED_METADATA, + ) + assert artifact.uri == _TEST_URI + assert artifact.display_name == _TEST_DISPLAY_NAME + assert artifact.description == _TEST_DESCRIPTION + assert artifact.schema_version == _TEST_SCHEMA_VERSION + assert artifact.metadata["accuracy"] == 0.1 + assert artifact.metadata["precision"] == 0.2 + assert artifact.metadata["recall"] == 0.3 + assert artifact.metadata["f1score"] == 0.4 + assert artifact.metadata["mean_absolute_error"] == 0.5 + assert artifact.metadata["mean_squared_error"] == 0.6 + + +class TestMetadataUtils: + def setup_method(self): + reload(initializer) + reload(metadata) + reload(aiplatform) + + def teardown_method(self): + initializer.global_pool.shutdown(wait=True) + + def 
test_predict_schemata_to_dict_method_returns_correct_schema(self): + predict_schema_ta = utils.PredictSchemata( + instance_schema_uri="instance_uri", + prediction_schema_uri="prediction_uri", + parameters_schema_uri="parameters_uri", + ) + expected_results = { + "instanceSchemaUri": "instance_uri", + "parametersSchemaUri": "parameters_uri", + "predictionSchemaUri": "prediction_uri", + } + + assert json.dumps(predict_schema_ta.to_dict()) == json.dumps(expected_results) + + def test_container_spec_to_dict_method_returns_correct_schema(self): + container_spec = utils.ContainerSpec( + image_uri="gcr.io/some_container_image_uri", + command=["test_command"], + args=["test_args"], + env=[{"env_var_name": "env_var_value"}], + ports=[1], + predict_route="test_prediction_rout", + health_route="test_health_rout", + ) + + expected_results = { + "imageUri": "gcr.io/some_container_image_uri", + "command": ["test_command"], + "args": ["test_args"], + "env": [{"env_var_name": "env_var_value"}], + "ports": [1], + "predictRoute": "test_prediction_rout", + "healthRoute": "test_health_rout", + } + + assert json.dumps(container_spec.to_dict()) == json.dumps(expected_results) + + def test_container_spec_to_dict_method_returns_correct_schema(self): + container_spec = utils.ContainerSpec( + image_uri="gcr.io/some_container_image_uri", + command=["test_command"], + args=["test_args"], + env=[{"env_var_name": "env_var_value"}], + ports=[1], + predict_route="test_prediction_rout", + health_route="test_health_rout", + ) + + expected_results = { + "imageUri": "gcr.io/some_container_image_uri", + "command": ["test_command"], + "args": ["test_args"], + "env": [{"env_var_name": "env_var_value"}], + "ports": [1], + "predictRoute": "test_prediction_rout", + "healthRoute": "test_health_rout", + } + + assert json.dumps(container_spec.to_dict()) == json.dumps(expected_results) \ No newline at end of file From 03b0b94dab7bff6c66e588a95e3371d4084037b7 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Tue, 14 Jun 2022 15:04:18 +0000 Subject: [PATCH 02/48] add support for artifact create --- google/cloud/aiplatform/metadata/artifact.py | 26 ++++++++++++++++--- .../cloud/aiplatform/metadata/types/base.py | 3 +++ .../aiplatform/metadata/types/google_types.py | 14 ++++++---- .../aiplatform/metadata/types/system_types.py | 11 +++++--- .../cloud/aiplatform/metadata/types/utils.py | 2 +- .../aiplatform/test_metadata_schema_types.py | 2 +- 6 files changed, 44 insertions(+), 14 deletions(-) diff --git a/google/cloud/aiplatform/metadata/artifact.py b/google/cloud/aiplatform/metadata/artifact.py index 45e20731d5..63974cb8eb 100644 --- a/google/cloud/aiplatform/metadata/artifact.py +++ b/google/cloud/aiplatform/metadata/artifact.py @@ -32,7 +32,7 @@ from google.cloud.aiplatform.metadata import resource from google.cloud.aiplatform.metadata import utils as metadata_utils from google.cloud.aiplatform.utils import rest_utils - +from google.cloud.aiplatform.metadata.types import base as types_base _LOGGER = base.Logger(__name__) @@ -176,6 +176,7 @@ def _create( """ api_client = cls._instantiate_client(location=location, credentials=credentials) + api_client = cls._instantiate_client(location=location, credentials=credentials) parent = utils.full_resource_name( resource_name=metadata_store_id, @@ -249,8 +250,8 @@ def _list_resources( @classmethod def create( cls, - schema_title: str, *, + schema_title: Optional[str] = None, resource_id: Optional[str] = None, uri: Optional[str] = None, display_name: Optional[str] = None, @@ -262,12 +263,13 @@ 
def create( project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, + base_artifact: Optional[types_base.BaseArtifactSchema] = None, ) -> "Artifact": """Creates a new Metadata Artifact. Args: schema_title (str): - Required. schema_title identifies the schema title used by the Artifact. + Optional. schema_title identifies the schema title used by the Artifact. Please reference https://cloud.google.com/vertex-ai/docs/ml-metadata/system-schemas. resource_id (str): @@ -307,10 +309,28 @@ def create( credentials (auth_credentials.Credentials): Optional. Custom credentials used to create this Artifact. Overrides credentials set in aiplatform.init. + base_artifact (BaseArtifactType): + Optional. An instance of the BaseArtifactType class that can be provided instead of providing artifact specific parameters. It overrides + the values provided for schema_title, resource_id, uri, display_name, schema_version, description, and metadata. Returns: Artifact: Instantiated representation of the managed Metadata Artifact. """ + if base_artifact: + return cls._create( + resource_id=base_artifact.resource_id, + schema_title=base_artifact.schema_title, + uri=base_artifact.uri, + display_name=base_artifact.display_name, + schema_version=base_artifact.schema_version, + description=base_artifact.description, + metadata=base_artifact.metadata, + metadata_store_id=metadata_store_id, + project=project, + location=location, + credentials=credentials, + ) + return cls._create( resource_id=resource_id, schema_title=schema_title, diff --git a/google/cloud/aiplatform/metadata/types/base.py b/google/cloud/aiplatform/metadata/types/base.py index 475570654b..3777812acb 100644 --- a/google/cloud/aiplatform/metadata/types/base.py +++ b/google/cloud/aiplatform/metadata/types/base.py @@ -41,6 +41,9 @@ class BaseArtifactSchema(object): Optional. The resource name of the Artifact following the format as follows. This is globally unique in a metadataStore: projects/123/locations/us-central1/metadataStores//artifacts/. + uri (str): + Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual + artifact file. display_name (str): Optional. The user-defined name of the Artifact. schema_version (str): diff --git a/google/cloud/aiplatform/metadata/types/google_types.py b/google/cloud/aiplatform/metadata/types/google_types.py index 135b7e43a7..3288a88ea2 100644 --- a/google/cloud/aiplatform/metadata/types/google_types.py +++ b/google/cloud/aiplatform/metadata/types/google_types.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -42,7 +42,8 @@ def __init__( more details, see https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.datasets/get uri (str): - Optional. The URI for the assets of this Artifact. + Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual + artifact file. display_name (str): Optional. The user-defined name of the Artifact. schema_version (str): @@ -90,7 +91,8 @@ def __init__( more details, see https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.models/get uri (str): - Optional. The URI for the assets of this Artifact. + Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual + artifact file. 
display_name (str): Optional. The user-defined name of the Artifact. schema_version (str): @@ -140,7 +142,8 @@ def __init__( more details, see https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.endpoints/get uri (str): - Optional. The URI for the assets of this Artifact. + Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual + artifact file. display_name (str): Optional. The user-defined name of the Artifact. schema_version (str): @@ -194,7 +197,8 @@ def __init__( This is globally unique in a metadataStore: projects/123/locations/us-central1/metadataStores//artifacts/. uri (str): - Optional. The URI for the assets of this Artifact. + Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual + artifact file. display_name (str): Optional. The user-defined name of the Artifact. schema_version (str): diff --git a/google/cloud/aiplatform/metadata/types/system_types.py b/google/cloud/aiplatform/metadata/types/system_types.py index 8db4fcfebe..570977f44b 100644 --- a/google/cloud/aiplatform/metadata/types/system_types.py +++ b/google/cloud/aiplatform/metadata/types/system_types.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -41,7 +41,8 @@ def __init__( This is globally unique in a metadataStore: projects/123/locations/us-central1/metadataStores//artifacts/. uri (str): - Optional. The URI for the assets of this base. + Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual + artifact file. display_name (str): Optional. The user-defined name of the base. schema_version (str): @@ -88,7 +89,8 @@ def __init__( This is globally unique in a metadataStore: projects/123/locations/us-central1/metadataStores//artifacts/. uri (str): - Optional. The URI for the assets of this base. + Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual + artifact file. display_name (str): Optional. The user-defined name of the base. schema_version (str): @@ -153,7 +155,8 @@ def __init__( mean_squared_error (float): Optional. Defaults to zero. uri (str): - Optional. The URI for the assets of this base. + Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual + artifact file. display_name (str): Optional. The user-defined name of the base. schema_version (str): diff --git a/google/cloud/aiplatform/metadata/types/utils.py b/google/cloud/aiplatform/metadata/types/utils.py index 61f9064690..b5d1a91beb 100644 --- a/google/cloud/aiplatform/metadata/types/utils.py +++ b/google/cloud/aiplatform/metadata/types/utils.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
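For orientation, the end-to-end flow that patches 01 and 02 wire together (a typed schema class whose create() delegates to Artifact.create(base_artifact=...)) would look roughly like the sketch below. This sketch is illustrative rather than part of the diff: it uses only classes and parameters defined in this series, while the project, location, URI, display-name, and metadata values are placeholder assumptions.

    from google.cloud import aiplatform
    from google.cloud.aiplatform.metadata.types import google_types
    from google.cloud.aiplatform.metadata.types import system_types

    # Placeholder project and location values; normally configured once per session.
    aiplatform.init(project="my-project", location="us-central1")

    # system_types.Dataset is one of the schema classes added in patch 01. Its create()
    # call builds the resource through Artifact.create(base_artifact=self, ...), the
    # pathway added to artifact.py in patch 02.
    dataset_artifact = system_types.Dataset(
        uri="gs://my-bucket/my-dataset",  # placeholder URI
        display_name="example-dataset",  # placeholder display name
        description="Example dataset artifact",
        metadata={"example_property": "example_value"},
    ).create(metadata_store_id="default")
    assert dataset_artifact.schema_title == "system.Dataset"

    # The google.* schema classes additionally record the backing Vertex resource in the
    # artifact metadata under the "resourceName" key; the dataset name is a placeholder.
    vertex_dataset_artifact = google_types.VertexDataset(
        dataset_name="projects/my-project/locations/us-central1/datasets/1234567890",
        display_name="example-vertex-dataset",  # placeholder display name
    ).create()
    assert vertex_dataset_artifact.schema_title == "google.VertexDataset"
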
diff --git a/tests/unit/aiplatform/test_metadata_schema_types.py b/tests/unit/aiplatform/test_metadata_schema_types.py index ab3dc97838..25bb18bca1 100644 --- a/tests/unit/aiplatform/test_metadata_schema_types.py +++ b/tests/unit/aiplatform/test_metadata_schema_types.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 380135d4658d693ff4a18b31e19cadaa3a7e7942 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Tue, 14 Jun 2022 15:22:30 +0000 Subject: [PATCH 03/48] add unit tests for create from artifact parameters --- tests/unit/aiplatform/test_metadata_schema_types.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/unit/aiplatform/test_metadata_schema_types.py b/tests/unit/aiplatform/test_metadata_schema_types.py index 25bb18bca1..72f267a6b1 100644 --- a/tests/unit/aiplatform/test_metadata_schema_types.py +++ b/tests/unit/aiplatform/test_metadata_schema_types.py @@ -125,7 +125,12 @@ def test_create_is_called_with_default_parameters(self, create_artifact_mock): create_artifact_mock.assert_called_once_with( parent=_TEST_PARENT, artifact=mock.ANY, artifact_id=None ) - + _, _, kwargs = create_artifact_mock.mock_calls[0] + assert kwargs['artifact'].schema_title == _TEST_SCHEMA_TITLE + assert kwargs['artifact'].uri == _TEST_URI + assert kwargs['artifact'].display_name == _TEST_DISPLAY_NAME + assert kwargs['artifact'].description == _TEST_DESCRIPTION + assert kwargs['artifact'].metadata == _TEST_UPDATED_METADATA class TestMetadataGoogleTypes: def setup_method(self): From 49e34cc865f9bcd9347f874f673df0f91c9436d8 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Tue, 14 Jun 2022 15:23:20 +0000 Subject: [PATCH 04/48] update formatting --- google/cloud/aiplatform/metadata/types/base.py | 2 +- .../cloud/aiplatform/metadata/types/google_types.py | 1 - .../cloud/aiplatform/metadata/types/system_types.py | 1 - google/cloud/aiplatform/metadata/types/utils.py | 1 - tests/unit/aiplatform/test_metadata_schema_types.py | 13 +++++++------ 5 files changed, 8 insertions(+), 10 deletions(-) diff --git a/google/cloud/aiplatform/metadata/types/base.py b/google/cloud/aiplatform/metadata/types/base.py index 3777812acb..de65068568 100644 --- a/google/cloud/aiplatform/metadata/types/base.py +++ b/google/cloud/aiplatform/metadata/types/base.py @@ -123,4 +123,4 @@ def create( location=location, credentials=credentials, ) - return self.artifact + return self.artifact diff --git a/google/cloud/aiplatform/metadata/types/google_types.py b/google/cloud/aiplatform/metadata/types/google_types.py index 3288a88ea2..6e8843aaa7 100644 --- a/google/cloud/aiplatform/metadata/types/google_types.py +++ b/google/cloud/aiplatform/metadata/types/google_types.py @@ -225,4 +225,3 @@ def __init__( description=description, metadata=extended_metadata, ) - \ No newline at end of file diff --git a/google/cloud/aiplatform/metadata/types/system_types.py b/google/cloud/aiplatform/metadata/types/system_types.py index 570977f44b..f7d829f9e9 100644 --- a/google/cloud/aiplatform/metadata/types/system_types.py +++ b/google/cloud/aiplatform/metadata/types/system_types.py @@ -187,4 +187,3 @@ def __init__( description=description, metadata=extended_metadata, ) - \ No newline at end of file diff --git a/google/cloud/aiplatform/metadata/types/utils.py b/google/cloud/aiplatform/metadata/types/utils.py index b5d1a91beb..90c360bf01 100644 --- 
a/google/cloud/aiplatform/metadata/types/utils.py +++ b/google/cloud/aiplatform/metadata/types/utils.py @@ -95,4 +95,3 @@ def to_dict(self): results["healthRoute"] = self.health_route return results - \ No newline at end of file diff --git a/tests/unit/aiplatform/test_metadata_schema_types.py b/tests/unit/aiplatform/test_metadata_schema_types.py index 72f267a6b1..3782f58e72 100644 --- a/tests/unit/aiplatform/test_metadata_schema_types.py +++ b/tests/unit/aiplatform/test_metadata_schema_types.py @@ -126,11 +126,12 @@ def test_create_is_called_with_default_parameters(self, create_artifact_mock): parent=_TEST_PARENT, artifact=mock.ANY, artifact_id=None ) _, _, kwargs = create_artifact_mock.mock_calls[0] - assert kwargs['artifact'].schema_title == _TEST_SCHEMA_TITLE - assert kwargs['artifact'].uri == _TEST_URI - assert kwargs['artifact'].display_name == _TEST_DISPLAY_NAME - assert kwargs['artifact'].description == _TEST_DESCRIPTION - assert kwargs['artifact'].metadata == _TEST_UPDATED_METADATA + assert kwargs["artifact"].schema_title == _TEST_SCHEMA_TITLE + assert kwargs["artifact"].uri == _TEST_URI + assert kwargs["artifact"].display_name == _TEST_DISPLAY_NAME + assert kwargs["artifact"].description == _TEST_DESCRIPTION + assert kwargs["artifact"].metadata == _TEST_UPDATED_METADATA + class TestMetadataGoogleTypes: def setup_method(self): @@ -428,4 +429,4 @@ def test_container_spec_to_dict_method_returns_correct_schema(self): "healthRoute": "test_health_rout", } - assert json.dumps(container_spec.to_dict()) == json.dumps(expected_results) \ No newline at end of file + assert json.dumps(container_spec.to_dict()) == json.dumps(expected_results) From 9471dbf6cf482ae7e39ef47250c4828040e2c988 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Tue, 14 Jun 2022 18:22:04 +0000 Subject: [PATCH 05/48] fix lint issues --- .../aiplatform/test_metadata_schema_types.py | 25 +------------------ 1 file changed, 1 insertion(+), 24 deletions(-) diff --git a/tests/unit/aiplatform/test_metadata_schema_types.py b/tests/unit/aiplatform/test_metadata_schema_types.py index 3782f58e72..ff6d468957 100644 --- a/tests/unit/aiplatform/test_metadata_schema_types.py +++ b/tests/unit/aiplatform/test_metadata_schema_types.py @@ -18,7 +18,7 @@ import json from importlib import reload from unittest import mock -from unittest.mock import patch, call +from unittest.mock import patch import pytest from google.cloud import aiplatform @@ -407,26 +407,3 @@ def test_container_spec_to_dict_method_returns_correct_schema(self): } assert json.dumps(container_spec.to_dict()) == json.dumps(expected_results) - - def test_container_spec_to_dict_method_returns_correct_schema(self): - container_spec = utils.ContainerSpec( - image_uri="gcr.io/some_container_image_uri", - command=["test_command"], - args=["test_args"], - env=[{"env_var_name": "env_var_value"}], - ports=[1], - predict_route="test_prediction_rout", - health_route="test_health_rout", - ) - - expected_results = { - "imageUri": "gcr.io/some_container_image_uri", - "command": ["test_command"], - "args": ["test_args"], - "env": [{"env_var_name": "env_var_value"}], - "ports": [1], - "predictRoute": "test_prediction_rout", - "healthRoute": "test_health_rout", - } - - assert json.dumps(container_spec.to_dict()) == json.dumps(expected_results) From 8d0b9e5a2ab5495c7e7d52757eddb7c1796ec1f0 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Tue, 14 Jun 2022 21:40:18 +0000 Subject: [PATCH 06/48] Add integration tests --- tests/system/aiplatform/test_metadata.py | 90 ++++++++++++++++++++++++ 1 
file changed, 90 insertions(+) diff --git a/tests/system/aiplatform/test_metadata.py b/tests/system/aiplatform/test_metadata.py index d5c076e75b..9a70abdadf 100644 --- a/tests/system/aiplatform/test_metadata.py +++ b/tests/system/aiplatform/test_metadata.py @@ -19,6 +19,10 @@ from google.cloud import aiplatform from tests.system.aiplatform import e2e_base +from google.cloud.aiplatform.metadata.types import google_types +from google.cloud.aiplatform.metadata.types import system_types +from google.cloud.aiplatform.metadata.types import base as schema_base_type +import json PARAMS = {"sdk-param-test-1": 0.1, "sdk-param-test-2": 0.2} @@ -70,3 +74,89 @@ def test_experiment_logging(self, shared_state): true_df_dict["run_name"] = run_name assert true_df_dict == df.to_dict("records")[0] + + def test_artifact_creation_using_schema_base_class(self): + + # Truncating the name because of resource id constraints from the service + artifact_display_name = self._make_display_name("base-artifact")[:30] + artifact_uri = self._make_display_name("base-uri") + artifact_metadata = {"test_property": "test_value"} + artifact_description = self._make_display_name("base-description") + + aiplatform.init( + project=e2e_base._PROJECT, + location=e2e_base._LOCATION, + ) + + artifact = schema_base_type.BaseArtifactSchema( + display_name=artifact_display_name, + uri=artifact_uri, + metadata=artifact_metadata, + description=artifact_description, + ).create() + + assert artifact.display_name == artifact_display_name + assert json.dumps(artifact.metadata) == json.dumps(artifact_metadata) + assert artifact.schema_title == "system.Artifact" + assert artifact.description == artifact_description + assert artifact.resource_name.startswith( + f"projects/{e2e_base._PROJECT}/locations/{e2e_base._LOCATION}/metadataStores/default/artifacts/" + ) + + def test_system_dataset_artifact_create(self): + + # Truncating the name because of resource id constraints from the service + artifact_display_name = self._make_display_name("dataset-artifact")[:30] + artifact_uri = self._make_display_name("dataset-uri") + artifact_metadata = {"test_property": "test_value"} + artifact_description = self._make_display_name("dataset-description") + + aiplatform.init( + project=e2e_base._PROJECT, + location=e2e_base._LOCATION, + ) + + artifact = system_types.Dataset( + display_name=artifact_display_name, + uri=artifact_uri, + metadata=artifact_metadata, + description=artifact_description, + ).create() + + assert artifact.display_name == artifact_display_name + assert json.dumps(artifact.metadata) == json.dumps(artifact_metadata) + assert artifact.schema_title == "system.Dataset" + assert artifact.description == artifact_description + assert artifact.resource_name.startswith( + f"projects/{e2e_base._PROJECT}/locations/{e2e_base._LOCATION}/metadataStores/default/artifacts/" + ) + + def test_google_dataset_artifact_create(self): + + # Truncating the name because of resource id constraints from the service + artifact_display_name = self._make_display_name("ds-artifact")[:30] + artifact_uri = self._make_display_name("vertex-dataset-uri") + artifact_metadata = {"test_property": "test_value"} + artifact_description = self._make_display_name("vertex-dataset-description") + dataset_name = f"projects/{e2e_base._PROJECT}/locations/{e2e_base._LOCATION}/datasets/{artifact_display_name}" + + aiplatform.init( + project=e2e_base._PROJECT, + location=e2e_base._LOCATION, + ) + + artifact = google_types.VertexDataset( + dataset_name=dataset_name, + 
display_name=artifact_display_name, + uri=artifact_uri, + metadata=artifact_metadata, + description=artifact_description, + ).create() + expected_metadata = artifact_metadata + expected_metadata["resourceName"] = dataset_name + + assert artifact.display_name == artifact_display_name + assert json.dumps(artifact.metadata) == json.dumps(expected_metadata) + assert artifact.schema_title == "google.VertexDataset" + assert artifact.description == artifact_description + assert artifact.resource_name == dataset_name From 1bd08aacf457a1ec5244f7fcd94acdc538d1cd10 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Wed, 15 Jun 2022 22:43:57 +0000 Subject: [PATCH 07/48] add support for execution types --- google/cloud/aiplatform/metadata/artifact.py | 1 - google/cloud/aiplatform/metadata/execution.py | 29 +- .../cloud/aiplatform/metadata/types/base.py | 100 ++++++ .../aiplatform/metadata/types/google_types.py | 4 + .../aiplatform/metadata/types/system_types.py | 292 ++++++++++++++++++ 5 files changed, 423 insertions(+), 3 deletions(-) diff --git a/google/cloud/aiplatform/metadata/artifact.py b/google/cloud/aiplatform/metadata/artifact.py index 63974cb8eb..80e42fd7a3 100644 --- a/google/cloud/aiplatform/metadata/artifact.py +++ b/google/cloud/aiplatform/metadata/artifact.py @@ -270,7 +270,6 @@ def create( Args: schema_title (str): Optional. schema_title identifies the schema title used by the Artifact. - Please reference https://cloud.google.com/vertex-ai/docs/ml-metadata/system-schemas. resource_id (str): Optional. The portion of the Artifact name with diff --git a/google/cloud/aiplatform/metadata/execution.py b/google/cloud/aiplatform/metadata/execution.py index 9a85bce36f..22de00e740 100644 --- a/google/cloud/aiplatform/metadata/execution.py +++ b/google/cloud/aiplatform/metadata/execution.py @@ -31,6 +31,7 @@ from google.cloud.aiplatform.metadata import artifact from google.cloud.aiplatform.metadata import metadata_store from google.cloud.aiplatform.metadata import resource +from google.cloud.aiplatform.metadata.types import base as types_base class Execution(resource._Resource): @@ -89,8 +90,8 @@ def state(self) -> gca_execution.Execution.State: @classmethod def create( cls, - schema_title: str, *, + schema_title: Optional[str] = None, state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, resource_id: Optional[str] = None, display_name: Optional[str] = None, @@ -101,13 +102,15 @@ def create( project: Optional[str] = None, location: Optional[str] = None, credentials=Optional[auth_credentials.Credentials], + base_execution: Optional[types_base.BaseExecutionSchema] = None, ) -> "Execution": """ Creates a new Metadata Execution. Args: schema_title (str): - Required. schema_title identifies the schema title used by the Execution. + Optional. schema_title identifies the schema title used by the Execution. + Either schema_title or base_execution must be provided. state (gca_execution.Execution.State.RUNNING): Optional. State of this Execution. Defaults to RUNNING. resource_id (str): @@ -137,6 +140,9 @@ def create( credentials (auth_credentials.Credentials): Optional. Custom credentials used to create this Execution. Overrides credentials set in aiplatform.init. + base_execution (BaseExecutionSchema): + Optional. An instance of the BaseExecutionSchema class that can be provided instead of providing schema specific parameters. It overrides + the values provided for schema_title, resource_id, state, display_name, schema_version, description, and metadata. 
Returns: Execution: Instantiated representation of the managed Metadata Execution. @@ -147,6 +153,25 @@ def create( ) super(base.VertexAiResourceNounWithFutureManager, self).__init__() + if base_execution: + resource = Execution._create_resource( + client=self.api_client, + parent=metadata_store._MetadataStore._format_resource_name( + project=self.project, + location=self.location, + metadata_store=metadata_store_id, + ), + schema_title=base_execution.schema_title, + resource_id=base_execution.resource_id, + metadata=base_execution.metadata, + description=base_execution.description, + display_name=base_execution.display_name, + schema_version=base_execution.schema_version, + state=base_execution.state, + ) + self._gca_resource = resource + return self + resource = Execution._create_resource( client=self.api_client, parent=metadata_store._MetadataStore._format_resource_name( diff --git a/google/cloud/aiplatform/metadata/types/base.py b/google/cloud/aiplatform/metadata/types/base.py index de65068568..638f7ba937 100644 --- a/google/cloud/aiplatform/metadata/types/base.py +++ b/google/cloud/aiplatform/metadata/types/base.py @@ -18,6 +18,7 @@ from google.auth import credentials as auth_credentials from typing import Optional, Dict from google.cloud.aiplatform.metadata import artifact +from google.cloud.aiplatform.metadata import execution from google.cloud.aiplatform import base from google.cloud.aiplatform.metadata import constants @@ -88,6 +89,7 @@ def __init__( self.schema_version = schema_version or constants._DEFAULT_SCHEMA_VERSION self.description = description self.metadata = metadata + self.kwargs = kwargs def create( self, @@ -123,4 +125,102 @@ def create( location=location, credentials=credentials, ) + return self.execution + + +class BaseExecutionSchema(object): + """Base class for Metadata Execution schema. + + This is the base class for defining various execution types. + + Args: + schema_title (str): + Required. schema_title identifies the schema title used by the Execution. + state (gca_execution.Execution.State.RUNNING): + Optional. State of this Execution. Defaults to RUNNING. + resource_name (str): + Optional. The resource name of the Execution following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. + display_name (str): + Optional. The user-defined name of the Execution. + schema_version (str): + Optional. schema_version specifies the version used by the Execution. + If not set, defaults to use the latest version. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Execution. + description (str): + Optional. Describes the purpose of the Execution to be created. + **kwargs: + Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. 
+ """ + + ARTIFACT_PROPERTY_KEY_RESOURCE_NAME = "resourceName" + SCHEMA_TITLE = "system.ContainerExecution" + + def __init__( + self, + schema_title: Optional[str] = None, + state: execution.Execution.State = execution.Execution.State.RUNNING, + resource_name: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + metadata: Optional[Dict] = None, + description: Optional[str] = None, + **kwargs, + ): + + """Initializes the Execution with the given name, URI and metadata.""" + self.schema_title = BaseExecutionSchema.SCHEMA_TITLE + if schema_title: + self.schema_title = schema_title + self.resource_name = resource_name + self.state = state + + self.resource_id = None + if resource_name: + # Temporary work around while Execution.create takes resource_id instead of resource_name + self.resource_id = resource_name.split("/")[-1] + + self.display_name = display_name + self.schema_version = schema_version or constants._DEFAULT_SCHEMA_VERSION + self.metadata = metadata + self.description = description + self.kwargs = kwargs + + def create( + self, + metadata_store_id: Optional[str] = "default", + project: Optional[str] = None, + location: Optional[str] = None, + credentials: Optional[auth_credentials.Credentials] = None, + ): + """Creates a new Metadata Execution. + + Args: + metadata_store_id (str): + Optional. The portion of the resource name with + the format: + projects/123/locations/us-central1/metadataStores//executions/ + If not provided, the MetadataStore's ID will be set to "default". + project (str): + Optional. Project used to create this Execution. Overrides project set in + aiplatform.init. + location (str): + Optional. Location used to create this Execution. Overrides location set in + aiplatform.init. + credentials (auth_credentials.Credentials): + Optional. Custom credentials used to create this Execution. Overrides + credentials set in aiplatform.init. + Returns: + Execution: Instantiated representation of the managed Metadata Execution. + + """ + self.exectuion = execution.Execution.create( + base_artifact=self, + metadata_store_id=metadata_store_id, + project=project, + location=location, + credentials=credentials, + ) return self.artifact diff --git a/google/cloud/aiplatform/metadata/types/google_types.py b/google/cloud/aiplatform/metadata/types/google_types.py index 6e8843aaa7..ae5f85f003 100644 --- a/google/cloud/aiplatform/metadata/types/google_types.py +++ b/google/cloud/aiplatform/metadata/types/google_types.py @@ -66,6 +66,7 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, + kwargs=kwargs, ) @@ -117,6 +118,7 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, + kwargs=kwargs, ) @@ -167,6 +169,7 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, + kwargs=kwargs, ) @@ -224,4 +227,5 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, + kwargs=kwargs, ) diff --git a/google/cloud/aiplatform/metadata/types/system_types.py b/google/cloud/aiplatform/metadata/types/system_types.py index f7d829f9e9..eb7d0d4f95 100644 --- a/google/cloud/aiplatform/metadata/types/system_types.py +++ b/google/cloud/aiplatform/metadata/types/system_types.py @@ -15,6 +15,7 @@ # limitations under the License. 
# from typing import Optional, Dict, List +from google.cloud.aiplatform.metadata import execution from google.cloud.aiplatform.metadata.types import base from google.cloud.aiplatform.metadata.types import utils from itertools import zip_longest @@ -65,6 +66,7 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, + kwargs=kwargs, ) @@ -113,6 +115,7 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, + kwargs=kwargs, ) @@ -186,4 +189,293 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, + kwargs=kwargs, + ) + + +class ContainerExecution(base.BaseExecutionSchema): + """Execution type for a container execution.""" + + SCHEMA_TITLE = "system.ContainerExecution" + + def __init__( + self, + state: execution.Execution.State = execution.Execution.State.RUNNING, + execution_name: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + metadata: Optional[Dict] = None, + description: Optional[str] = None, + **kwargs, + ): + """Args: + state (gca_execution.Execution.State.RUNNING): + Optional. State of this Execution. Defaults to RUNNING. + execution_name (str): + Optional. The resource name of the Execution following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//executions/. + display_name (str): + Optional. The user-defined name of the Execution. + schema_version (str): + Optional. schema_version specifies the version used by the Execution. + If not set, defaults to use the latest version. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Execution. + description (str): + Optional. Describes the purpose of the Execution to be created. + **kwargs: + Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. + """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = execution_name + super(ContainerExecution, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=execution_name, + state=state, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + kwargs=kwargs, + ) + + +class ImporterExecution(base.BaseExecutionSchema): + """Execution type for a importer execution.""" + + SCHEMA_TITLE = "system.ImporterExecution" + + def __init__( + self, + state: execution.Execution.State = execution.Execution.State.RUNNING, + execution_name: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + metadata: Optional[Dict] = None, + description: Optional[str] = None, + **kwargs, + ): + """Args: + state (gca_execution.Execution.State.RUNNING): + Optional. State of this Execution. Defaults to RUNNING. + execution_name (str): + Optional. The resource name of the Execution following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//executions/. + display_name (str): + Optional. The user-defined name of the Execution. + schema_version (str): + Optional. schema_version specifies the version used by the Execution. + If not set, defaults to use the latest version. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Execution. + description (str): + Optional. Describes the purpose of the Execution to be created. 
+ **kwargs: + Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. + """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = execution_name + super(ImporterExecution, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=execution_name, + state=state, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + kwargs=kwargs, + ) + + +class ResolverExecution(base.BaseExecutionSchema): + """Execution type for a resolver execution.""" + + SCHEMA_TITLE = "system.ResolverExecution" + + def __init__( + self, + state: execution.Execution.State = execution.Execution.State.RUNNING, + execution_name: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + metadata: Optional[Dict] = None, + description: Optional[str] = None, + **kwargs, + ): + """Args: + state (gca_execution.Execution.State.RUNNING): + Optional. State of this Execution. Defaults to RUNNING. + execution_name (str): + Optional. The resource name of the Execution following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//executions/. + display_name (str): + Optional. The user-defined name of the Execution. + schema_version (str): + Optional. schema_version specifies the version used by the Execution. + If not set, defaults to use the latest version. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Execution. + description (str): + Optional. Describes the purpose of the Execution to be created. + **kwargs: + Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. + """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = execution_name + super(ResolverExecution, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=execution_name, + state=state, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + kwargs=kwargs, + ) + + +class DagExecution(base.BaseExecutionSchema): + """Execution type for a dag execution.""" + + SCHEMA_TITLE = "system.DagExecution" + + def __init__( + self, + state: execution.Execution.State = execution.Execution.State.RUNNING, + execution_name: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + metadata: Optional[Dict] = None, + description: Optional[str] = None, + **kwargs, + ): + """Args: + state (gca_execution.Execution.State.RUNNING): + Optional. State of this Execution. Defaults to RUNNING. + execution_name (str): + Optional. The resource name of the Execution following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//executions/. + display_name (str): + Optional. The user-defined name of the Execution. + schema_version (str): + Optional. schema_version specifies the version used by the Execution. + If not set, defaults to use the latest version. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Execution. + description (str): + Optional. Describes the purpose of the Execution to be created. + **kwargs: + Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. 
+ """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = execution_name + super(DagExecution, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=execution_name, + state=state, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + kwargs=kwargs, + ) + + +class CustomJobExecution(base.BaseExecutionSchema): + """Execution type for a custom job execution.""" + + SCHEMA_TITLE = "system.CustomJobExecution" + + def __init__( + self, + state: execution.Execution.State = execution.Execution.State.RUNNING, + execution_name: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + metadata: Optional[Dict] = None, + description: Optional[str] = None, + **kwargs, + ): + """Args: + state (gca_execution.Execution.State.RUNNING): + Optional. State of this Execution. Defaults to RUNNING. + execution_name (str): + Optional. The resource name of the Execution following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//executions/. + display_name (str): + Optional. The user-defined name of the Execution. + schema_version (str): + Optional. schema_version specifies the version used by the Execution. + If not set, defaults to use the latest version. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Execution. + description (str): + Optional. Describes the purpose of the Execution to be created. + **kwargs: + Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. + """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = execution_name + super(CustomJobExecution, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=execution_name, + state=state, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + kwargs=kwargs, + ) + + +class RunExecution(base.BaseExecutionSchema): + """Execution type for root run execution.""" + + SCHEMA_TITLE = "system.Run" + + def __init__( + self, + state: execution.Execution.State = execution.Execution.State.RUNNING, + execution_name: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + metadata: Optional[Dict] = None, + description: Optional[str] = None, + **kwargs, + ): + """Args: + state (gca_execution.Execution.State.RUNNING): + Optional. State of this Execution. Defaults to RUNNING. + execution_name (str): + Optional. The resource name of the Execution following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//executions/. + display_name (str): + Optional. The user-defined name of the Execution. + schema_version (str): + Optional. schema_version specifies the version used by the Execution. + If not set, defaults to use the latest version. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Execution. + description (str): + Optional. Describes the purpose of the Execution to be created. + **kwargs: + Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. 
+ """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = execution_name + super(RunExecution, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=execution_name, + state=state, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + kwargs=kwargs, ) From 947249fe11a61c666a9613f59daaa6c28e2ec884 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Wed, 15 Jun 2022 22:47:27 +0000 Subject: [PATCH 08/48] correct execution type in create --- google/cloud/aiplatform/metadata/types/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/aiplatform/metadata/types/base.py b/google/cloud/aiplatform/metadata/types/base.py index 638f7ba937..5d87aebcce 100644 --- a/google/cloud/aiplatform/metadata/types/base.py +++ b/google/cloud/aiplatform/metadata/types/base.py @@ -217,7 +217,7 @@ def create( """ self.exectuion = execution.Execution.create( - base_artifact=self, + base_execution=self, metadata_store_id=metadata_store_id, project=project, location=location, From c23efa92f722e7d257a1998317fd1bce92600e53 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 06:40:44 +0000 Subject: [PATCH 09/48] add execution support and unit tests --- google/cloud/aiplatform/metadata/artifact.py | 22 +-- google/cloud/aiplatform/metadata/execution.py | 24 ++-- .../types/{base.py => base_artifact.py} | 107 +-------------- .../metadata/types/base_execution.py | 120 +++++++++++++++++ .../aiplatform/metadata/types/google_types.py | 10 +- .../aiplatform/metadata/types/system_types.py | 38 +++--- tests/system/aiplatform/test_metadata.py | 4 +- .../aiplatform/test_metadata_schema_types.py | 127 ++++++++++++++++-- 8 files changed, 290 insertions(+), 162 deletions(-) rename google/cloud/aiplatform/metadata/types/{base.py => base_artifact.py} (54%) create mode 100644 google/cloud/aiplatform/metadata/types/base_execution.py diff --git a/google/cloud/aiplatform/metadata/artifact.py b/google/cloud/aiplatform/metadata/artifact.py index 80e42fd7a3..9f1337dbc2 100644 --- a/google/cloud/aiplatform/metadata/artifact.py +++ b/google/cloud/aiplatform/metadata/artifact.py @@ -32,7 +32,7 @@ from google.cloud.aiplatform.metadata import resource from google.cloud.aiplatform.metadata import utils as metadata_utils from google.cloud.aiplatform.utils import rest_utils -from google.cloud.aiplatform.metadata.types import base as types_base +from google.cloud.aiplatform.metadata.types import base_artifact _LOGGER = base.Logger(__name__) @@ -263,7 +263,7 @@ def create( project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, - base_artifact: Optional[types_base.BaseArtifactSchema] = None, + base_artifact_schema: Optional[base_artifact.BaseArtifactSchema] = None, ) -> "Artifact": """Creates a new Metadata Artifact. @@ -308,22 +308,22 @@ def create( credentials (auth_credentials.Credentials): Optional. Custom credentials used to create this Artifact. Overrides credentials set in aiplatform.init. - base_artifact (BaseArtifactType): + base_artifact_schema (BaseArtifactType): Optional. An instance of the BaseArtifactType class that can be provided instead of providing artifact specific parameters. It overrides the values provided for schema_title, resource_id, uri, display_name, schema_version, description, and metadata. Returns: Artifact: Instantiated representation of the managed Metadata Artifact. 
""" - if base_artifact: + if base_artifact_schema: return cls._create( - resource_id=base_artifact.resource_id, - schema_title=base_artifact.schema_title, - uri=base_artifact.uri, - display_name=base_artifact.display_name, - schema_version=base_artifact.schema_version, - description=base_artifact.description, - metadata=base_artifact.metadata, + resource_id=base_artifact_schema.resource_id, + schema_title=base_artifact_schema.schema_title, + uri=base_artifact_schema.uri, + display_name=base_artifact_schema.display_name, + schema_version=base_artifact_schema.schema_version, + description=base_artifact_schema.description, + metadata=base_artifact_schema.metadata, metadata_store_id=metadata_store_id, project=project, location=location, diff --git a/google/cloud/aiplatform/metadata/execution.py b/google/cloud/aiplatform/metadata/execution.py index 22de00e740..4536374ac5 100644 --- a/google/cloud/aiplatform/metadata/execution.py +++ b/google/cloud/aiplatform/metadata/execution.py @@ -31,7 +31,7 @@ from google.cloud.aiplatform.metadata import artifact from google.cloud.aiplatform.metadata import metadata_store from google.cloud.aiplatform.metadata import resource -from google.cloud.aiplatform.metadata.types import base as types_base +from google.cloud.aiplatform.metadata.types import base_execution class Execution(resource._Resource): @@ -102,7 +102,7 @@ def create( project: Optional[str] = None, location: Optional[str] = None, credentials=Optional[auth_credentials.Credentials], - base_execution: Optional[types_base.BaseExecutionSchema] = None, + base_execution_schema: Optional[base_execution.BaseExecutionSchema] = None, ) -> "Execution": """ Creates a new Metadata Execution. @@ -110,7 +110,7 @@ def create( Args: schema_title (str): Optional. schema_title identifies the schema title used by the Execution. - Either schema_title or base_execution must be provided. + Either schema_title or base_execution_schema must be provided. state (gca_execution.Execution.State.RUNNING): Optional. State of this Execution. Defaults to RUNNING. resource_id (str): @@ -140,7 +140,7 @@ def create( credentials (auth_credentials.Credentials): Optional. Custom credentials used to create this Execution. Overrides credentials set in aiplatform.init. - base_execution (BaseExecutionSchema): + base_execution_schema (BaseExecutionSchema): Optional. An instance of the BaseExecutionSchema class that can be provided instead of providing schema specific parameters. It overrides the values provided for schema_title, resource_id, state, display_name, schema_version, description, and metadata. 
@@ -153,7 +153,7 @@ def create( ) super(base.VertexAiResourceNounWithFutureManager, self).__init__() - if base_execution: + if base_execution_schema: resource = Execution._create_resource( client=self.api_client, parent=metadata_store._MetadataStore._format_resource_name( @@ -161,13 +161,13 @@ def create( location=self.location, metadata_store=metadata_store_id, ), - schema_title=base_execution.schema_title, - resource_id=base_execution.resource_id, - metadata=base_execution.metadata, - description=base_execution.description, - display_name=base_execution.display_name, - schema_version=base_execution.schema_version, - state=base_execution.state, + schema_title=base_execution_schema.schema_title, + resource_id=base_execution_schema.resource_id, + metadata=base_execution_schema.metadata, + description=base_execution_schema.description, + display_name=base_execution_schema.display_name, + schema_version=base_execution_schema.schema_version, + state=base_execution_schema.state, ) self._gca_resource = resource return self diff --git a/google/cloud/aiplatform/metadata/types/base.py b/google/cloud/aiplatform/metadata/types/base_artifact.py similarity index 54% rename from google/cloud/aiplatform/metadata/types/base.py rename to google/cloud/aiplatform/metadata/types/base_artifact.py index 5d87aebcce..dc783c3c72 100644 --- a/google/cloud/aiplatform/metadata/types/base.py +++ b/google/cloud/aiplatform/metadata/types/base_artifact.py @@ -16,14 +16,9 @@ # from google.auth import credentials as auth_credentials -from typing import Optional, Dict from google.cloud.aiplatform.metadata import artifact -from google.cloud.aiplatform.metadata import execution -from google.cloud.aiplatform import base - from google.cloud.aiplatform.metadata import constants - -_LOGGER = base.Logger(__name__) +from typing import Optional, Dict class BaseArtifactSchema(object): @@ -119,105 +114,7 @@ def create( Artifact: Instantiated representation of the managed Metadata Artifact. """ self.artifact = artifact.Artifact.create( - base_artifact=self, - metadata_store_id=metadata_store_id, - project=project, - location=location, - credentials=credentials, - ) - return self.execution - - -class BaseExecutionSchema(object): - """Base class for Metadata Execution schema. - - This is the base class for defining various execution types. - - Args: - schema_title (str): - Required. schema_title identifies the schema title used by the Execution. - state (gca_execution.Execution.State.RUNNING): - Optional. State of this Execution. Defaults to RUNNING. - resource_name (str): - Optional. The resource name of the Execution following the format as follows. - This is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//artifacts/. - display_name (str): - Optional. The user-defined name of the Execution. - schema_version (str): - Optional. schema_version specifies the version used by the Execution. - If not set, defaults to use the latest version. - metadata (Dict): - Optional. Contains the metadata information that will be stored in the Execution. - description (str): - Optional. Describes the purpose of the Execution to be created. - **kwargs: - Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. 
- """ - - ARTIFACT_PROPERTY_KEY_RESOURCE_NAME = "resourceName" - SCHEMA_TITLE = "system.ContainerExecution" - - def __init__( - self, - schema_title: Optional[str] = None, - state: execution.Execution.State = execution.Execution.State.RUNNING, - resource_name: Optional[str] = None, - display_name: Optional[str] = None, - schema_version: Optional[str] = None, - metadata: Optional[Dict] = None, - description: Optional[str] = None, - **kwargs, - ): - - """Initializes the Execution with the given name, URI and metadata.""" - self.schema_title = BaseExecutionSchema.SCHEMA_TITLE - if schema_title: - self.schema_title = schema_title - self.resource_name = resource_name - self.state = state - - self.resource_id = None - if resource_name: - # Temporary work around while Execution.create takes resource_id instead of resource_name - self.resource_id = resource_name.split("/")[-1] - - self.display_name = display_name - self.schema_version = schema_version or constants._DEFAULT_SCHEMA_VERSION - self.metadata = metadata - self.description = description - self.kwargs = kwargs - - def create( - self, - metadata_store_id: Optional[str] = "default", - project: Optional[str] = None, - location: Optional[str] = None, - credentials: Optional[auth_credentials.Credentials] = None, - ): - """Creates a new Metadata Execution. - - Args: - metadata_store_id (str): - Optional. The portion of the resource name with - the format: - projects/123/locations/us-central1/metadataStores//executions/ - If not provided, the MetadataStore's ID will be set to "default". - project (str): - Optional. Project used to create this Execution. Overrides project set in - aiplatform.init. - location (str): - Optional. Location used to create this Execution. Overrides location set in - aiplatform.init. - credentials (auth_credentials.Credentials): - Optional. Custom credentials used to create this Execution. Overrides - credentials set in aiplatform.init. - Returns: - Execution: Instantiated representation of the managed Metadata Execution. - - """ - self.exectuion = execution.Execution.create( - base_execution=self, + base_artifact_schema=self, metadata_store_id=metadata_store_id, project=project, location=location, diff --git a/google/cloud/aiplatform/metadata/types/base_execution.py b/google/cloud/aiplatform/metadata/types/base_execution.py new file mode 100644 index 0000000000..6b20c336ab --- /dev/null +++ b/google/cloud/aiplatform/metadata/types/base_execution.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth import credentials as auth_credentials +from google.cloud.aiplatform.compat.types import execution as gca_execution +from google.cloud.aiplatform.metadata import constants +from google.cloud.aiplatform.metadata import execution +from typing import Optional, Dict + + +class BaseExecutionSchema(object): + """Base class for Metadata Execution schema. + + This is the base class for defining various execution types. 
+ + Args: + schema_title (str): + Required. schema_title identifies the schema title used by the Execution. + state (gca_execution.Execution.State.RUNNING): + Optional. State of this Execution. Defaults to RUNNING. + resource_name (str): + Optional. The resource name of the Execution following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. + display_name (str): + Optional. The user-defined name of the Execution. + schema_version (str): + Optional. schema_version specifies the version used by the Execution. + If not set, defaults to use the latest version. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Execution. + description (str): + Optional. Describes the purpose of the Execution to be created. + **kwargs: + Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. + """ + + ARTIFACT_PROPERTY_KEY_RESOURCE_NAME = "resourceName" + SCHEMA_TITLE = "system.ContainerExecution" + + def __init__( + self, + schema_title: Optional[str] = None, + state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, + resource_name: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + metadata: Optional[Dict] = None, + description: Optional[str] = None, + **kwargs, + ): + + """Initializes the Execution with the given name, URI and metadata.""" + self.schema_title = BaseExecutionSchema.SCHEMA_TITLE + if schema_title: + self.schema_title = schema_title + self.resource_name = resource_name + self.state = state + + self.resource_id = None + if resource_name: + # Temporary work around while Execution.create takes resource_id instead of resource_name + self.resource_id = resource_name.split("/")[-1] + + self.display_name = display_name + self.schema_version = schema_version or constants._DEFAULT_SCHEMA_VERSION + self.metadata = metadata + self.description = description + self.kwargs = kwargs + + def create( + self, + metadata_store_id: Optional[str] = "default", + project: Optional[str] = None, + location: Optional[str] = None, + credentials: Optional[auth_credentials.Credentials] = None, + ): + """Creates a new Metadata Execution. + + Args: + metadata_store_id (str): + Optional. The portion of the resource name with + the format: + projects/123/locations/us-central1/metadataStores//executions/ + If not provided, the MetadataStore's ID will be set to "default". + project (str): + Optional. Project used to create this Execution. Overrides project set in + aiplatform.init. + location (str): + Optional. Location used to create this Execution. Overrides location set in + aiplatform.init. + credentials (auth_credentials.Credentials): + Optional. Custom credentials used to create this Execution. Overrides + credentials set in aiplatform.init. + Returns: + Execution: Instantiated representation of the managed Metadata Execution. 
+ + """ + self.exectuion = execution.Execution.create( + base_execution_schema=self, + metadata_store_id=metadata_store_id, + project=project, + location=location, + credentials=credentials, + ) + return self.exectuion diff --git a/google/cloud/aiplatform/metadata/types/google_types.py b/google/cloud/aiplatform/metadata/types/google_types.py index ae5f85f003..6ef5171c64 100644 --- a/google/cloud/aiplatform/metadata/types/google_types.py +++ b/google/cloud/aiplatform/metadata/types/google_types.py @@ -16,11 +16,11 @@ # from typing import Optional, Dict, NamedTuple, List from dataclasses import dataclass -from google.cloud.aiplatform.metadata.types import base +from google.cloud.aiplatform.metadata.types import base_artifact from google.cloud.aiplatform.metadata.types import utils -class VertexDataset(base.BaseArtifactSchema): +class VertexDataset(base_artifact.BaseArtifactSchema): """An artifact representing a Vertex Dataset.""" SCHEMA_TITLE = "google.VertexDataset" @@ -70,7 +70,7 @@ def __init__( ) -class VertexModel(base.BaseArtifactSchema): +class VertexModel(base_artifact.BaseArtifactSchema): """An artifact representing a Vertex Model.""" SCHEMA_TITLE = "google.VertexModel" @@ -122,7 +122,7 @@ def __init__( ) -class VertexEndpoint(base.BaseArtifactSchema): +class VertexEndpoint(base_artifact.BaseArtifactSchema): """An artifact representing a Vertex Endpoint.""" SCHEMA_TITLE = "google.VertexEndpoint" @@ -173,7 +173,7 @@ def __init__( ) -class UnmanagedContainerModel(base.BaseArtifactSchema): +class UnmanagedContainerModel(base_artifact.BaseArtifactSchema): """An artifact representing a Vertex Unmanaged Container Model.""" SCHEMA_TITLE = "google.UnmanagedContainerModel" diff --git a/google/cloud/aiplatform/metadata/types/system_types.py b/google/cloud/aiplatform/metadata/types/system_types.py index eb7d0d4f95..223819846c 100644 --- a/google/cloud/aiplatform/metadata/types/system_types.py +++ b/google/cloud/aiplatform/metadata/types/system_types.py @@ -15,13 +15,13 @@ # limitations under the License. 
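# Sketch of supplying an explicit execution state through the gca_execution enum
# imported by the new base_execution module (schema title and display name are
# placeholders):
from google.cloud.aiplatform.compat.types import execution as gca_execution
from google.cloud.aiplatform.metadata.types import base_execution

# Describe a job that has already finished rather than the default RUNNING state.
finished_job = base_execution.BaseExecutionSchema(
    schema_title="system.CustomJobExecution",
    display_name="finished-custom-job",
    state=gca_execution.Execution.State.COMPLETE,
)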
# from typing import Optional, Dict, List -from google.cloud.aiplatform.metadata import execution -from google.cloud.aiplatform.metadata.types import base +from google.cloud.aiplatform.compat.types import execution as gca_execution +from google.cloud.aiplatform.metadata.types import base_artifact +from google.cloud.aiplatform.metadata.types import base_execution from google.cloud.aiplatform.metadata.types import utils -from itertools import zip_longest -class Model(base.BaseArtifactSchema): +class Model(base_artifact.BaseArtifactSchema): """Schemaless Artifact Type to store Markdown file.""" SCHEMA_TITLE = "system.Model" @@ -70,7 +70,7 @@ def __init__( ) -class Dataset(base.BaseArtifactSchema): +class Dataset(base_artifact.BaseArtifactSchema): """An artifact representing a system Dataset.""" SCHEMA_TITLE = "system.Dataset" @@ -119,7 +119,7 @@ def __init__( ) -class Metrics(base.BaseArtifactSchema): +class Metrics(base_artifact.BaseArtifactSchema): """Artifact type for scalar metrics.""" SCHEMA_TITLE = "system.Metrics" @@ -193,14 +193,14 @@ def __init__( ) -class ContainerExecution(base.BaseExecutionSchema): +class ContainerExecution(base_execution.BaseExecutionSchema): """Execution type for a container execution.""" SCHEMA_TITLE = "system.ContainerExecution" def __init__( self, - state: execution.Execution.State = execution.Execution.State.RUNNING, + state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, execution_name: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -241,14 +241,14 @@ def __init__( ) -class ImporterExecution(base.BaseExecutionSchema): +class ImporterExecution(base_execution.BaseExecutionSchema): """Execution type for a importer execution.""" SCHEMA_TITLE = "system.ImporterExecution" def __init__( self, - state: execution.Execution.State = execution.Execution.State.RUNNING, + state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, execution_name: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -289,14 +289,14 @@ def __init__( ) -class ResolverExecution(base.BaseExecutionSchema): +class ResolverExecution(base_execution.BaseExecutionSchema): """Execution type for a resolver execution.""" SCHEMA_TITLE = "system.ResolverExecution" def __init__( self, - state: execution.Execution.State = execution.Execution.State.RUNNING, + state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, execution_name: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -337,14 +337,14 @@ def __init__( ) -class DagExecution(base.BaseExecutionSchema): +class DagExecution(base_execution.BaseExecutionSchema): """Execution type for a dag execution.""" SCHEMA_TITLE = "system.DagExecution" def __init__( self, - state: execution.Execution.State = execution.Execution.State.RUNNING, + state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, execution_name: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -385,14 +385,14 @@ def __init__( ) -class CustomJobExecution(base.BaseExecutionSchema): +class CustomJobExecution(base_execution.BaseExecutionSchema): """Execution type for a custom job execution.""" SCHEMA_TITLE = "system.CustomJobExecution" def __init__( self, - state: execution.Execution.State = execution.Execution.State.RUNNING, + state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, execution_name: 
Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -433,14 +433,14 @@ def __init__( ) -class RunExecution(base.BaseExecutionSchema): +class Run(base_execution.BaseExecutionSchema): """Execution type for root run execution.""" SCHEMA_TITLE = "system.Run" def __init__( self, - state: execution.Execution.State = execution.Execution.State.RUNNING, + state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, execution_name: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -469,7 +469,7 @@ def __init__( """ extended_metadata = metadata or {} extended_metadata["resourceName"] = execution_name - super(RunExecution, self).__init__( + super(Run, self).__init__( schema_title=self.SCHEMA_TITLE, resource_name=execution_name, state=state, diff --git a/tests/system/aiplatform/test_metadata.py b/tests/system/aiplatform/test_metadata.py index 9a70abdadf..5947b252a4 100644 --- a/tests/system/aiplatform/test_metadata.py +++ b/tests/system/aiplatform/test_metadata.py @@ -21,7 +21,7 @@ from tests.system.aiplatform import e2e_base from google.cloud.aiplatform.metadata.types import google_types from google.cloud.aiplatform.metadata.types import system_types -from google.cloud.aiplatform.metadata.types import base as schema_base_type +from google.cloud.aiplatform.metadata.types import base_artifact import json @@ -88,7 +88,7 @@ def test_artifact_creation_using_schema_base_class(self): location=e2e_base._LOCATION, ) - artifact = schema_base_type.BaseArtifactSchema( + artifact = base_artifact.BaseArtifactSchema( display_name=artifact_display_name, uri=artifact_uri, metadata=artifact_metadata, diff --git a/tests/unit/aiplatform/test_metadata_schema_types.py b/tests/unit/aiplatform/test_metadata_schema_types.py index ff6d468957..7317e2ddcb 100644 --- a/tests/unit/aiplatform/test_metadata_schema_types.py +++ b/tests/unit/aiplatform/test_metadata_schema_types.py @@ -24,13 +24,16 @@ from google.cloud import aiplatform from google.cloud.aiplatform import initializer from google.cloud.aiplatform.metadata import metadata -from google.cloud.aiplatform.metadata.types import base +from google.cloud.aiplatform.metadata.types import base_artifact +from google.cloud.aiplatform.metadata.types import base_execution from google.cloud.aiplatform.metadata.types import google_types from google.cloud.aiplatform.metadata.types import system_types from google.cloud.aiplatform.metadata.types import utils +from google.cloud.aiplatform.compat.types import execution as gca_execution from google.cloud.aiplatform_v1 import MetadataServiceClient from google.cloud.aiplatform_v1 import Artifact as GapicArtifact +from google.cloud.aiplatform_v1 import Execution as GapicExecution # project _TEST_PROJECT = "test-project" @@ -40,6 +43,7 @@ _TEST_PARENT = f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}/metadataStores/{_TEST_METADATA_STORE}" # resource attributes +_TEST_STATE = gca_execution.Execution.State.STATE_UNSPECIFIED _TEST_URI = "test-uri" _TEST_DISPLAY_NAME = "test-display-name" _TEST_SCHEMA_TITLE = "test.Example" @@ -60,6 +64,10 @@ _TEST_ARTIFACT_ID = "test-artifact-id" _TEST_ARTIFACT_NAME = f"{_TEST_PARENT}/artifacts/{_TEST_ARTIFACT_ID}" +# execution +_TEST_EXECUTION_ID = "test-execution-id" +_TEST_EXECUTION_NAME = f"{_TEST_PARENT}/executions/{_TEST_EXECUTION_ID}" + @pytest.fixture def create_artifact_mock(): @@ -76,7 +84,24 @@ def create_artifact_mock(): yield create_artifact_mock -class TestMetadataBaseSchema: 
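# Illustrative sketch of the concrete execution schema classes defined above in
# system_types (display name, metadata values, and project are placeholders):
from google.cloud import aiplatform
from google.cloud.aiplatform.metadata.types import system_types

aiplatform.init(project="my-project", location="us-central1")

# Record a container-based training step as a system.ContainerExecution.
training_step = system_types.ContainerExecution(
    display_name="train-model",
    metadata={"container_image": "gcr.io/my-project/trainer:latest"},
).create()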
+@pytest.fixture +def create_execution_mock(): + with patch.object( + MetadataServiceClient, "create_execution" + ) as create_execution_mock: + create_execution_mock.return_value = GapicExecution( + name=_TEST_EXECUTION_NAME, + display_name=_TEST_DISPLAY_NAME, + schema_title=_TEST_SCHEMA_TITLE, + schema_version=_TEST_SCHEMA_VERSION, + description=_TEST_DESCRIPTION, + metadata=_TEST_METADATA, + state=GapicExecution.State.RUNNING, + ) + yield create_execution_mock + + +class TestMetadataBaseArtifactSchema: def setup_method(self): reload(initializer) reload(metadata) @@ -86,19 +111,19 @@ def teardown_method(self): initializer.global_pool.shutdown(wait=True) def test_base_class_overrides_default_schema_title(self): - artifact = base.BaseArtifactSchema(schema_title=_TEST_SCHEMA_TITLE) + artifact = base_artifact.BaseArtifactSchema(schema_title=_TEST_SCHEMA_TITLE) assert artifact.schema_title == _TEST_SCHEMA_TITLE def test_base_class_overrides_resouce_id_from_resouce_name(self): - artifact = base.BaseArtifactSchema(resource_name=_TEST_ARTIFACT_NAME) + artifact = base_artifact.BaseArtifactSchema(resource_name=_TEST_ARTIFACT_NAME) assert artifact.resource_id == _TEST_ARTIFACT_ID def test_base_class_overrides_default_version(self): - artifact = base.BaseArtifactSchema(schema_version=_TEST_SCHEMA_VERSION) + artifact = base_artifact.BaseArtifactSchema(schema_version=_TEST_SCHEMA_VERSION) assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_base_class_init_remaining_parameters_are_assigned_correctly(self): - artifact = base.BaseArtifactSchema( + artifact = base_artifact.BaseArtifactSchema( schema_title=_TEST_SCHEMA_TITLE, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, @@ -114,14 +139,14 @@ def test_base_class_init_remaining_parameters_are_assigned_correctly(self): @pytest.mark.usefixtures("create_artifact_mock") def test_create_is_called_with_default_parameters(self, create_artifact_mock): aiplatform.init(project=_TEST_PROJECT) - base_artifact = base.BaseArtifactSchema( + artifact = base_artifact.BaseArtifactSchema( schema_title=_TEST_SCHEMA_TITLE, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) - base_artifact.create(metadata_store_id=_TEST_METADATA_STORE) + artifact.create(metadata_store_id=_TEST_METADATA_STORE) create_artifact_mock.assert_called_once_with( parent=_TEST_PARENT, artifact=mock.ANY, artifact_id=None ) @@ -133,6 +158,67 @@ def test_create_is_called_with_default_parameters(self, create_artifact_mock): assert kwargs["artifact"].metadata == _TEST_UPDATED_METADATA +class TestMetadataBaseExecutionSchema: + def setup_method(self): + reload(initializer) + reload(metadata) + reload(aiplatform) + + def teardown_method(self): + initializer.global_pool.shutdown(wait=True) + + def test_base_class_overrides_default_schema_title(self): + execution = base_execution.BaseExecutionSchema(schema_title=_TEST_SCHEMA_TITLE) + assert execution.schema_title == _TEST_SCHEMA_TITLE + + def test_base_class_overrides_resouce_id_from_resouce_name(self): + execution = base_execution.BaseExecutionSchema( + resource_name=_TEST_ARTIFACT_NAME + ) + assert execution.resource_id == _TEST_ARTIFACT_ID + + def test_base_class_overrides_default_version(self): + execution = base_execution.BaseExecutionSchema( + schema_version=_TEST_SCHEMA_VERSION + ) + assert execution.schema_version == _TEST_SCHEMA_VERSION + + def test_base_class_init_remaining_parameters_are_assigned_correctly(self): + execution = base_execution.BaseExecutionSchema( + 
schema_title=_TEST_SCHEMA_TITLE, + state=_TEST_STATE, + display_name=_TEST_DISPLAY_NAME, + description=_TEST_DESCRIPTION, + metadata=_TEST_UPDATED_METADATA, + ) + assert execution.schema_title == _TEST_SCHEMA_TITLE + assert execution.state == _TEST_STATE + assert execution.display_name == _TEST_DISPLAY_NAME + assert execution.description == _TEST_DESCRIPTION + assert execution.metadata == _TEST_UPDATED_METADATA + + @pytest.mark.usefixtures("create_execution_mock") + def test_create_is_called_with_default_parameters(self, create_execution_mock): + aiplatform.init(project=_TEST_PROJECT) + execution = base_execution.BaseExecutionSchema( + schema_title=_TEST_SCHEMA_TITLE, + state=_TEST_STATE, + display_name=_TEST_DISPLAY_NAME, + description=_TEST_DESCRIPTION, + metadata=_TEST_UPDATED_METADATA, + ) + execution.create(metadata_store_id=_TEST_METADATA_STORE) + create_execution_mock.assert_called_once_with( + parent=_TEST_PARENT, execution=mock.ANY, execution_id=None + ) + _, _, kwargs = create_execution_mock.mock_calls[0] + assert kwargs["execution"].schema_title == _TEST_SCHEMA_TITLE + assert kwargs["execution"].state == _TEST_STATE + assert kwargs["execution"].display_name == _TEST_DISPLAY_NAME + assert kwargs["execution"].description == _TEST_DESCRIPTION + assert kwargs["execution"].metadata == _TEST_UPDATED_METADATA + + class TestMetadataGoogleTypes: def setup_method(self): reload(initializer) @@ -361,6 +447,31 @@ def test_system_metrics_constructor_parameters_are_set_correctly(self): assert artifact.metadata["mean_absolute_error"] == 0.5 assert artifact.metadata["mean_squared_error"] == 0.6 + # Test system.Execution Schemas + def test_system_container_execution_schema_title_is_set_correctly(self): + execution = system_types.ContainerExecution() + assert execution.schema_title == "system.ContainerExecution" + + def test_system_importer_execution_schema_title_is_set_correctly(self): + execution = system_types.ImporterExecution() + assert execution.schema_title == "system.ImporterExecution" + + def test_system_resolver_execution_schema_title_is_set_correctly(self): + execution = system_types.ResolverExecution() + assert execution.schema_title == "system.ResolverExecution" + + def test_system_dag_execution_schema_title_is_set_correctly(self): + execution = system_types.DagExecution() + assert execution.schema_title == "system.DagExecution" + + def test_system_custom_job_execution_schema_title_is_set_correctly(self): + execution = system_types.CustomJobExecution() + assert execution.schema_title == "system.CustomJobExecution" + + def test_system_run_execution_schema_title_is_set_correctly(self): + execution = system_types.Run() + assert execution.schema_title == "system.Run" + class TestMetadataUtils: def setup_method(self): From 8c7350c860984079169b575197f1029cdd99e69e Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 07:22:21 +0000 Subject: [PATCH 10/48] add support for state in artifacts --- google/cloud/aiplatform/metadata/artifact.py | 1 + .../metadata/types/base_artifact.py | 48 +++++++++++-------- .../aiplatform/metadata/types/google_types.py | 45 +++++++++++++++-- .../aiplatform/metadata/types/system_types.py | 35 ++++++++++++-- .../aiplatform/test_metadata_schema_types.py | 39 +++++++++++++-- 5 files changed, 136 insertions(+), 32 deletions(-) diff --git a/google/cloud/aiplatform/metadata/artifact.py b/google/cloud/aiplatform/metadata/artifact.py index 9f1337dbc2..8d093bd47a 100644 --- a/google/cloud/aiplatform/metadata/artifact.py +++ 
b/google/cloud/aiplatform/metadata/artifact.py @@ -324,6 +324,7 @@ def create( schema_version=base_artifact_schema.schema_version, description=base_artifact_schema.description, metadata=base_artifact_schema.metadata, + state=base_artifact_schema.state, metadata_store_id=metadata_store_id, project=project, location=location, diff --git a/google/cloud/aiplatform/metadata/types/base_artifact.py b/google/cloud/aiplatform/metadata/types/base_artifact.py index dc783c3c72..9b33f50c15 100644 --- a/google/cloud/aiplatform/metadata/types/base_artifact.py +++ b/google/cloud/aiplatform/metadata/types/base_artifact.py @@ -18,6 +18,7 @@ from google.auth import credentials as auth_credentials from google.cloud.aiplatform.metadata import artifact from google.cloud.aiplatform.metadata import constants +from google.cloud.aiplatform.compat.types import artifact as gca_artifact from typing import Optional, Dict @@ -30,25 +31,32 @@ class BaseArtifactSchema(object): metadata related to this artifact. Subclasses from ArtifactType can enforce various structure and field requirements for the metadata field. - Args: - schema_title (str): - Optional. The schema title used by the Artifact, defaults to "system.Artifact" - resource_name (str): - Optional. The resource name of the Artifact following the format as follows. - This is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//artifacts/. - uri (str): - Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual - artifact file. - display_name (str): - Optional. The user-defined name of the Artifact. - schema_version (str): - Optional. schema_version specifies the version used by the Artifact. - If not set, defaults to use the latest version. - description (str): - Optional. Describes the purpose of the Artifact to be created. - metadata (Dict): - Optional. Contains the metadata information that will be stored in the Artifact. + Args: + schema_title (str): + Optional. The schema title used by the Artifact, defaults to "system.Artifact" + resource_name (str): + Optional. The resource name of the Artifact following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. + uri (str): + Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual + artifact file. + display_name (str): + Optional. The user-defined name of the Artifact. + schema_version (str): + Optional. schema_version specifies the version used by the Artifact. + If not set, defaults to use the latest version. + description (str): + Optional. Describes the purpose of the Artifact to be created. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Artifact. + state (google.cloud.gapic.types.Artifact.State): + Optional. The state of this Artifact. This is a + property of the Artifact, and does not imply or + capture any ongoing process. This property is + managed by clients (such as Vertex AI + Pipelines), and the system does not prescribe or + check the validity of state transitions. **kwargs: Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. 
""" @@ -65,6 +73,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, + state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, **kwargs, ): @@ -84,6 +93,7 @@ def __init__( self.schema_version = schema_version or constants._DEFAULT_SCHEMA_VERSION self.description = description self.metadata = metadata + self.state = state self.kwargs = kwargs def create( diff --git a/google/cloud/aiplatform/metadata/types/google_types.py b/google/cloud/aiplatform/metadata/types/google_types.py index 6ef5171c64..cd3c990555 100644 --- a/google/cloud/aiplatform/metadata/types/google_types.py +++ b/google/cloud/aiplatform/metadata/types/google_types.py @@ -18,6 +18,7 @@ from dataclasses import dataclass from google.cloud.aiplatform.metadata.types import base_artifact from google.cloud.aiplatform.metadata.types import utils +from google.cloud.aiplatform.compat.types import artifact as gca_artifact class VertexDataset(base_artifact.BaseArtifactSchema): @@ -33,6 +34,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, + state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, **kwargs, ): """Args: @@ -52,7 +54,14 @@ def __init__( description (str): Optional. Describes the purpose of the Artifact to be created. metadata (Dict): - Optional. Contains the metadata information that will be stored in the base. + Optional. Contains the metadata information that will be stored in the Artifact. + state (google.cloud.gapic.types.Artifact.State): + Optional. The state of this Artifact. This is a + property of the Artifact, and does not imply or + capture any ongoing process. This property is + managed by clients (such as Vertex AI + Pipelines), and the system does not prescribe or + check the validity of state transitions. **kwargs: Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. """ @@ -66,6 +75,7 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, + state=state, kwargs=kwargs, ) @@ -83,6 +93,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, + state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, **kwargs, ): """Args: @@ -102,7 +113,14 @@ def __init__( description (str): Optional. Describes the purpose of the Artifact to be created. metadata (Dict): - Optional. Contains the metadata information that will be stored in the base. + Optional. Contains the metadata information that will be stored in the Artifact. + state (google.cloud.gapic.types.Artifact.State): + Optional. The state of this Artifact. This is a + property of the Artifact, and does not imply or + capture any ongoing process. This property is + managed by clients (such as Vertex AI + Pipelines), and the system does not prescribe or + check the validity of state transitions. **kwargs: Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. 
""" @@ -118,6 +136,7 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, + state=state, kwargs=kwargs, ) @@ -135,6 +154,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, + state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, **kwargs, ): """Args: @@ -154,7 +174,14 @@ def __init__( description (str): Optional. Describes the purpose of the Artifact to be created. metadata (Dict): - Optional. Contains the metadata information that will be stored in the base. + Optional. Contains the metadata information that will be stored in the Artifact. + state (google.cloud.gapic.types.Artifact.State): + Optional. The state of this Artifact. This is a + property of the Artifact, and does not imply or + capture any ongoing process. This property is + managed by clients (such as Vertex AI + Pipelines), and the system does not prescribe or + check the validity of state transitions. **kwargs: Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. """ @@ -169,6 +196,7 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, + state=state, kwargs=kwargs, ) @@ -188,6 +216,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, + state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, **kwargs, ): """Args: @@ -210,7 +239,14 @@ def __init__( description (str): Optional. Describes the purpose of the Artifact to be created. metadata (Dict): - Optional. Contains the metadata information that will be stored in the base. + Optional. Contains the metadata information that will be stored in the Artifact. + state (google.cloud.gapic.types.Artifact.State): + Optional. The state of this Artifact. This is a + property of the Artifact, and does not imply or + capture any ongoing process. This property is + managed by clients (such as Vertex AI + Pipelines), and the system does not prescribe or + check the validity of state transitions. **kwargs: Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. """ @@ -227,5 +263,6 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, + state=state, kwargs=kwargs, ) diff --git a/google/cloud/aiplatform/metadata/types/system_types.py b/google/cloud/aiplatform/metadata/types/system_types.py index 223819846c..b307d1dd5e 100644 --- a/google/cloud/aiplatform/metadata/types/system_types.py +++ b/google/cloud/aiplatform/metadata/types/system_types.py @@ -18,7 +18,7 @@ from google.cloud.aiplatform.compat.types import execution as gca_execution from google.cloud.aiplatform.metadata.types import base_artifact from google.cloud.aiplatform.metadata.types import base_execution -from google.cloud.aiplatform.metadata.types import utils +from google.cloud.aiplatform.compat.types import artifact as gca_artifact class Model(base_artifact.BaseArtifactSchema): @@ -34,6 +34,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, + state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, **kwargs, ): """Args: @@ -52,7 +53,14 @@ def __init__( description (str): Optional. Describes the purpose of the Artifact to be created. metadata (Dict): - Optional. 
Contains the metadata information that will be stored in the base. + Optional. Contains the metadata information that will be stored in the Artifact. + state (google.cloud.gapic.types.Artifact.State): + Optional. The state of this Artifact. This is a + property of the Artifact, and does not imply or + capture any ongoing process. This property is + managed by clients (such as Vertex AI + Pipelines), and the system does not prescribe or + check the validity of state transitions. **kwargs: Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. """ @@ -66,6 +74,7 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, + state=state, kwargs=kwargs, ) @@ -83,6 +92,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, + state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, **kwargs, ): """Args: @@ -101,7 +111,14 @@ def __init__( description (str): Optional. Describes the purpose of the Artifact to be created. metadata (Dict): - Optional. Contains the metadata information that will be stored in the base. + Optional. Contains the metadata information that will be stored in the Artifact. + state (google.cloud.gapic.types.Artifact.State): + Optional. The state of this Artifact. This is a + property of the Artifact, and does not imply or + capture any ongoing process. This property is + managed by clients (such as Vertex AI + Pipelines), and the system does not prescribe or + check the validity of state transitions. **kwargs: Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. """ @@ -115,6 +132,7 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, + state=state, kwargs=kwargs, ) @@ -138,6 +156,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, + state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, **kwargs, ): """Args: @@ -168,7 +187,14 @@ def __init__( description (str): Optional. Describes the purpose of the Artifact to be created. metadata (Dict): - Optional. Contains the metadata information that will be stored in the base. + Optional. Contains the metadata information that will be stored in the Artifact. + state (google.cloud.gapic.types.Artifact.State): + Optional. The state of this Artifact. This is a + property of the Artifact, and does not imply or + capture any ongoing process. This property is + managed by clients (such as Vertex AI + Pipelines), and the system does not prescribe or + check the validity of state transitions. **kwargs: Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. 
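As a sketch of the system.Metrics schema documented above, assuming only the keyword fields exercised by the unit tests in this series (accuracy, precision, mean_absolute_error), with placeholder values:

from google.cloud.aiplatform.metadata.types import system_types

metrics_artifact = system_types.Metrics(
    accuracy=0.95,
    precision=0.93,
    mean_absolute_error=0.12,
    display_name="eval-metrics",
)
# The scalar values are carried in the artifact's metadata dictionary.
assert metrics_artifact.schema_title == "system.Metrics"
assert metrics_artifact.metadata["mean_absolute_error"] == 0.12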
""" @@ -189,6 +215,7 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, + state=state, kwargs=kwargs, ) diff --git a/tests/unit/aiplatform/test_metadata_schema_types.py b/tests/unit/aiplatform/test_metadata_schema_types.py index 7317e2ddcb..4c88ba05a5 100644 --- a/tests/unit/aiplatform/test_metadata_schema_types.py +++ b/tests/unit/aiplatform/test_metadata_schema_types.py @@ -30,7 +30,9 @@ from google.cloud.aiplatform.metadata.types import system_types from google.cloud.aiplatform.metadata.types import utils +from google.cloud.aiplatform.compat.types import artifact as gca_artifact from google.cloud.aiplatform.compat.types import execution as gca_execution + from google.cloud.aiplatform_v1 import MetadataServiceClient from google.cloud.aiplatform_v1 import Artifact as GapicArtifact from google.cloud.aiplatform_v1 import Execution as GapicExecution @@ -43,7 +45,8 @@ _TEST_PARENT = f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}/metadataStores/{_TEST_METADATA_STORE}" # resource attributes -_TEST_STATE = gca_execution.Execution.State.STATE_UNSPECIFIED +_TEST_ARTIFACT_STATE = gca_artifact.Artifact.State.STATE_UNSPECIFIED +_TEST_EXECUTION_STATE = gca_execution.Execution.State.STATE_UNSPECIFIED _TEST_URI = "test-uri" _TEST_DISPLAY_NAME = "test-display-name" _TEST_SCHEMA_TITLE = "test.Example" @@ -114,6 +117,18 @@ def test_base_class_overrides_default_schema_title(self): artifact = base_artifact.BaseArtifactSchema(schema_title=_TEST_SCHEMA_TITLE) assert artifact.schema_title == _TEST_SCHEMA_TITLE + def test_base_class_overrides_default_state(self): + artifact = base_artifact.BaseArtifactSchema(state=_TEST_ARTIFACT_STATE) + assert artifact.state == _TEST_ARTIFACT_STATE + + def test_base_class_default_schema_title(self): + artifact = base_artifact.BaseArtifactSchema() + assert artifact.schema_title == "system.Artifact" + + def test_base_class_default_state(self): + artifact = base_artifact.BaseArtifactSchema() + assert artifact.state == gca_artifact.Artifact.State.LIVE + def test_base_class_overrides_resouce_id_from_resouce_name(self): artifact = base_artifact.BaseArtifactSchema(resource_name=_TEST_ARTIFACT_NAME) assert artifact.resource_id == _TEST_ARTIFACT_ID @@ -145,6 +160,7 @@ def test_create_is_called_with_default_parameters(self, create_artifact_mock): display_name=_TEST_DISPLAY_NAME, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, + state=_TEST_ARTIFACT_STATE, ) artifact.create(metadata_store_id=_TEST_METADATA_STORE) create_artifact_mock.assert_called_once_with( @@ -156,6 +172,7 @@ def test_create_is_called_with_default_parameters(self, create_artifact_mock): assert kwargs["artifact"].display_name == _TEST_DISPLAY_NAME assert kwargs["artifact"].description == _TEST_DESCRIPTION assert kwargs["artifact"].metadata == _TEST_UPDATED_METADATA + assert kwargs["artifact"].state == _TEST_ARTIFACT_STATE class TestMetadataBaseExecutionSchema: @@ -171,6 +188,18 @@ def test_base_class_overrides_default_schema_title(self): execution = base_execution.BaseExecutionSchema(schema_title=_TEST_SCHEMA_TITLE) assert execution.schema_title == _TEST_SCHEMA_TITLE + def test_base_class_overrides_default_state(self): + execution = base_execution.BaseExecutionSchema(state=_TEST_EXECUTION_STATE) + assert execution.state == _TEST_EXECUTION_STATE + + def test_base_class_default_schema_title(self): + execution = base_execution.BaseExecutionSchema() + assert execution.schema_title == "system.ContainerExecution" + + def 
test_base_class_default_state(self): + execution = base_execution.BaseExecutionSchema() + assert execution.state == gca_execution.Execution.State.RUNNING + def test_base_class_overrides_resouce_id_from_resouce_name(self): execution = base_execution.BaseExecutionSchema( resource_name=_TEST_ARTIFACT_NAME @@ -186,13 +215,13 @@ def test_base_class_overrides_default_version(self): def test_base_class_init_remaining_parameters_are_assigned_correctly(self): execution = base_execution.BaseExecutionSchema( schema_title=_TEST_SCHEMA_TITLE, - state=_TEST_STATE, + state=_TEST_EXECUTION_STATE, display_name=_TEST_DISPLAY_NAME, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) assert execution.schema_title == _TEST_SCHEMA_TITLE - assert execution.state == _TEST_STATE + assert execution.state == _TEST_EXECUTION_STATE assert execution.display_name == _TEST_DISPLAY_NAME assert execution.description == _TEST_DESCRIPTION assert execution.metadata == _TEST_UPDATED_METADATA @@ -202,7 +231,7 @@ def test_create_is_called_with_default_parameters(self, create_execution_mock): aiplatform.init(project=_TEST_PROJECT) execution = base_execution.BaseExecutionSchema( schema_title=_TEST_SCHEMA_TITLE, - state=_TEST_STATE, + state=_TEST_EXECUTION_STATE, display_name=_TEST_DISPLAY_NAME, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, @@ -213,7 +242,7 @@ def test_create_is_called_with_default_parameters(self, create_execution_mock): ) _, _, kwargs = create_execution_mock.mock_calls[0] assert kwargs["execution"].schema_title == _TEST_SCHEMA_TITLE - assert kwargs["execution"].state == _TEST_STATE + assert kwargs["execution"].state == _TEST_EXECUTION_STATE assert kwargs["execution"].display_name == _TEST_DISPLAY_NAME assert kwargs["execution"].description == _TEST_DESCRIPTION assert kwargs["execution"].metadata == _TEST_UPDATED_METADATA From b2f2a917b447cafaf96ae87f79095bab75e9252d Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 07:44:24 +0000 Subject: [PATCH 11/48] add support for start_execution --- google/cloud/aiplatform/metadata/metadata.py | 47 +++++++++++++------ .../metadata/types/base_execution.py | 39 +++++++++++++++ 2 files changed, 72 insertions(+), 14 deletions(-) diff --git a/google/cloud/aiplatform/metadata/metadata.py b/google/cloud/aiplatform/metadata/metadata.py index 1b533d5176..26da9f0bf9 100644 --- a/google/cloud/aiplatform/metadata/metadata.py +++ b/google/cloud/aiplatform/metadata/metadata.py @@ -32,6 +32,7 @@ from google.cloud.aiplatform.metadata import experiment_resources from google.cloud.aiplatform.metadata import experiment_run_resource from google.cloud.aiplatform.tensorboard import tensorboard_resource +from google.cloud.aiplatform.metadata.types import base_execution _LOGGER = base.Logger(__name__) @@ -516,6 +517,7 @@ def start_execution( project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, + base_execution_schema: Optional[base_execution.BaseExecutionSchema] = None, ) -> execution.Execution: """ Create and starts a new Metadata Execution or resumes a previously created Execution. @@ -565,6 +567,9 @@ def start_execution( credentials (auth_credentials.Credentials): Optional. Custom credentials used to create this Execution. Overrides credentials set in aiplatform.init. + base_execution_schema (BaseExecutionSchema): + Optional. An instance of the BaseExecutionSchema class that can be provided instead of providing schema specific parameters. 
It overrides + the values provided for schema_title, resource_id, state, display_name, schema_version, description, and metadata. Returns: Execution: Instantiated representation of the managed Metadata Execution. @@ -605,22 +610,36 @@ def start_execution( run_execution.update(state=gca_execution.Execution.State.RUNNING) else: - if not schema_title: - raise ValueError( - "schema_title must be provided when starting a new Execution" + if base_execution_schema: + run_execution = execution.Execution.create( + display_name=base_execution_schema.display_name, + schema_title=base_execution_schema.schema_title, + schema_version=base_execution_schema.schema_version, + metadata=base_execution_schema.metadata, + description=base_execution_schema.description, + resource_id=base_execution_schema.resource_id, + state=base_execution_schema.state, + project=project, + location=location, + credentials=credentials, ) + else: + if not schema_title: + raise ValueError( + "schema_title must be provided when starting a new Execution" + ) - run_execution = execution.Execution.create( - display_name=display_name, - schema_title=schema_title, - schema_version=schema_version, - metadata=metadata, - description=description, - resource_id=resource_id, - project=project, - location=location, - credentials=credentials, - ) + run_execution = execution.Execution.create( + display_name=display_name, + schema_title=schema_title, + schema_version=schema_version, + metadata=metadata, + description=description, + resource_id=resource_id, + project=project, + location=location, + credentials=credentials, + ) if self.experiment_run: if self.experiment_run._is_legacy_experiment_run(): diff --git a/google/cloud/aiplatform/metadata/types/base_execution.py b/google/cloud/aiplatform/metadata/types/base_execution.py index 6b20c336ab..229d563c4f 100644 --- a/google/cloud/aiplatform/metadata/types/base_execution.py +++ b/google/cloud/aiplatform/metadata/types/base_execution.py @@ -19,6 +19,7 @@ from google.cloud.aiplatform.compat.types import execution as gca_execution from google.cloud.aiplatform.metadata import constants from google.cloud.aiplatform.metadata import execution +from google.cloud.aiplatform.metadata import metadata from typing import Optional, Dict @@ -118,3 +119,41 @@ def create( credentials=credentials, ) return self.exectuion + + def start_execution( + self, + metadata_store_id: Optional[str] = "default", + project: Optional[str] = None, + location: Optional[str] = None, + credentials: Optional[auth_credentials.Credentials] = None, + ): + """Create and starts a new Metadata Execution. + + Args: + metadata_store_id (str): + Optional. The portion of the resource name with + the format: + projects/123/locations/us-central1/metadataStores//executions/ + If not provided, the MetadataStore's ID will be set to "default". + project (str): + Optional. Project used to create this Execution. Overrides project set in + aiplatform.init. + location (str): + Optional. Location used to create this Execution. Overrides location set in + aiplatform.init. + credentials (auth_credentials.Credentials): + Optional. Custom credentials used to create this Execution. Overrides + credentials set in aiplatform.init. + Returns: + Execution: Instantiated representation of the managed Metadata Execution. 
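A sketch of the new start_execution entry point added here, called directly on a schema instance as the integration tests later in this series do; assumes aiplatform.init has configured project and location, and the names are placeholders:

from google.cloud import aiplatform
from google.cloud.aiplatform.metadata.types import system_types

aiplatform.init(project="my-project", location="us-central1")  # placeholder project

# ContainerExecution inherits BaseExecutionSchema.start_execution, so the schema
# object both names the execution type and starts the tracked execution.
execution = system_types.ContainerExecution(
    display_name="preprocess-step",
    description="tokenize and shard the corpus",
).start_execution()
print(execution.resource_name)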
+ + """ + self.exectuion = metadata._ExperimentTracker.start_execution( + base_execution_schema=self, + resume=False, + metadata_store_id=metadata_store_id, + project=project, + location=location, + credentials=credentials, + ) + return self.exectuion From 1e85c2b4e97b423d74cb354e4fd440b635c6d87a Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 08:03:33 +0000 Subject: [PATCH 12/48] add support for metadata_store_id in start_execution --- google/cloud/aiplatform/metadata/metadata.py | 11 +++----- .../metadata/types/base_execution.py | 2 +- .../aiplatform/test_metadata_schema_types.py | 27 ++++++++++++++++++- 3 files changed, 31 insertions(+), 9 deletions(-) diff --git a/google/cloud/aiplatform/metadata/metadata.py b/google/cloud/aiplatform/metadata/metadata.py index 26da9f0bf9..6ec675303f 100644 --- a/google/cloud/aiplatform/metadata/metadata.py +++ b/google/cloud/aiplatform/metadata/metadata.py @@ -513,6 +513,7 @@ def start_execution( metadata: Optional[Dict[str, Any]] = None, schema_version: Optional[str] = None, description: Optional[str] = None, + metadata_store_id: Optional[str] = "default", resume: bool = False, project: Optional[str] = None, location: Optional[str] = None, @@ -612,13 +613,8 @@ def start_execution( else: if base_execution_schema: run_execution = execution.Execution.create( - display_name=base_execution_schema.display_name, - schema_title=base_execution_schema.schema_title, - schema_version=base_execution_schema.schema_version, - metadata=base_execution_schema.metadata, - description=base_execution_schema.description, - resource_id=base_execution_schema.resource_id, - state=base_execution_schema.state, + base_execution_schema=base_execution_schema, + metadata_store_id=metadata_store_id, project=project, location=location, credentials=credentials, @@ -635,6 +631,7 @@ def start_execution( schema_version=schema_version, metadata=metadata, description=description, + metadata_store_id=metadata_store_id, resource_id=resource_id, project=project, location=location, diff --git a/google/cloud/aiplatform/metadata/types/base_execution.py b/google/cloud/aiplatform/metadata/types/base_execution.py index 229d563c4f..8c34e9ef1d 100644 --- a/google/cloud/aiplatform/metadata/types/base_execution.py +++ b/google/cloud/aiplatform/metadata/types/base_execution.py @@ -148,7 +148,7 @@ def start_execution( Execution: Instantiated representation of the managed Metadata Execution. 
""" - self.exectuion = metadata._ExperimentTracker.start_execution( + self.exectuion = metadata._ExperimentTracker().start_execution( base_execution_schema=self, resume=False, metadata_store_id=metadata_store_id, diff --git a/tests/unit/aiplatform/test_metadata_schema_types.py b/tests/unit/aiplatform/test_metadata_schema_types.py index 4c88ba05a5..d207fffca0 100644 --- a/tests/unit/aiplatform/test_metadata_schema_types.py +++ b/tests/unit/aiplatform/test_metadata_schema_types.py @@ -227,7 +227,9 @@ def test_base_class_init_remaining_parameters_are_assigned_correctly(self): assert execution.metadata == _TEST_UPDATED_METADATA @pytest.mark.usefixtures("create_execution_mock") - def test_create_is_called_with_default_parameters(self, create_execution_mock): + def test_create_method_calls_gapic_library_with_correct_parameters( + self, create_execution_mock + ): aiplatform.init(project=_TEST_PROJECT) execution = base_execution.BaseExecutionSchema( schema_title=_TEST_SCHEMA_TITLE, @@ -247,6 +249,29 @@ def test_create_is_called_with_default_parameters(self, create_execution_mock): assert kwargs["execution"].description == _TEST_DESCRIPTION assert kwargs["execution"].metadata == _TEST_UPDATED_METADATA + @pytest.mark.usefixtures("create_execution_mock") + def test_start_execution_method_calls_gapic_library_with_correct_parameters( + self, create_execution_mock + ): + aiplatform.init(project=_TEST_PROJECT) + execution = base_execution.BaseExecutionSchema( + schema_title=_TEST_SCHEMA_TITLE, + state=_TEST_EXECUTION_STATE, + display_name=_TEST_DISPLAY_NAME, + description=_TEST_DESCRIPTION, + metadata=_TEST_UPDATED_METADATA, + ) + execution.start_execution(metadata_store_id=_TEST_METADATA_STORE) + create_execution_mock.assert_called_once_with( + parent=_TEST_PARENT, execution=mock.ANY, execution_id=None + ) + _, _, kwargs = create_execution_mock.mock_calls[0] + assert kwargs["execution"].schema_title == _TEST_SCHEMA_TITLE + assert kwargs["execution"].state == _TEST_EXECUTION_STATE + assert kwargs["execution"].display_name == _TEST_DISPLAY_NAME + assert kwargs["execution"].description == _TEST_DESCRIPTION + assert kwargs["execution"].metadata == _TEST_UPDATED_METADATA + class TestMetadataGoogleTypes: def setup_method(self): From a93bdcdfef391d428b1e99db32bdf54c1fc2ad59 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 08:39:00 +0000 Subject: [PATCH 13/48] lint and docs update based on review feedback --- google/cloud/aiplatform/metadata/artifact.py | 28 ++++++-- google/cloud/aiplatform/metadata/execution.py | 2 +- google/cloud/aiplatform/metadata/metadata.py | 2 +- .../{types => schema}/base_artifact.py | 1 + .../{types => schema}/base_execution.py | 2 + .../google_schema.py} | 7 +- .../system_schema.py} | 22 +++--- .../metadata/{types => schema}/utils.py | 5 +- .../aiplatform/test_metadata_schema_types.py | 70 ++++++++++--------- 9 files changed, 77 insertions(+), 62 deletions(-) rename google/cloud/aiplatform/metadata/{types => schema}/base_artifact.py (98%) rename google/cloud/aiplatform/metadata/{types => schema}/base_execution.py (98%) rename google/cloud/aiplatform/metadata/{types/google_types.py => schema/google_schema.py} (98%) rename google/cloud/aiplatform/metadata/{types/system_types.py => schema/system_schema.py} (97%) rename google/cloud/aiplatform/metadata/{types => schema}/utils.py (97%) diff --git a/google/cloud/aiplatform/metadata/artifact.py b/google/cloud/aiplatform/metadata/artifact.py index 8d093bd47a..78da1abe51 100644 --- 
a/google/cloud/aiplatform/metadata/artifact.py +++ b/google/cloud/aiplatform/metadata/artifact.py @@ -15,6 +15,7 @@ # limitations under the License. # +from multiprocessing.sharedctypes import Value from typing import Optional, Dict, Union import proto @@ -32,7 +33,7 @@ from google.cloud.aiplatform.metadata import resource from google.cloud.aiplatform.metadata import utils as metadata_utils from google.cloud.aiplatform.utils import rest_utils -from google.cloud.aiplatform.metadata.types import base_artifact +from google.cloud.aiplatform.metadata.schema import base_artifact _LOGGER = base.Logger(__name__) @@ -176,7 +177,6 @@ def _create( """ api_client = cls._instantiate_client(location=location, credentials=credentials) - api_client = cls._instantiate_client(location=location, credentials=credentials) parent = utils.full_resource_name( resource_name=metadata_store_id, @@ -269,24 +269,30 @@ def create( Args: schema_title (str): - Optional. schema_title identifies the schema title used by the Artifact. + Optional. schema_title identifies the schema title used by the + Artifact. One of schema_title or base_artifact_schema needs to + be set. base_artifact_schema overrides this parameter. Please reference https://cloud.google.com/vertex-ai/docs/ml-metadata/system-schemas. resource_id (str): Optional. The portion of the Artifact name with - the format. This is globally unique in a metadataStore: + the format. base_artifact_schema overrides this parameter. This + is globally unique in a metadataStore: projects/123/locations/us-central1/metadataStores//artifacts/. uri (str): Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual - artifact file. + artifact file. base_artifact_schema overrides this parameter. display_name (str): - Optional. The user-defined name of the Artifact. + Optional. The user-defined name of the Artifact. base_artifact_schema overrides this parameter. schema_version (str): Optional. schema_version specifies the version used by the Artifact. If not set, defaults to use the latest version. + base_artifact_schema overrides this parameter. description (str): Optional. Describes the purpose of the Artifact to be created. + base_artifact_schema overrides this parameter. metadata (Dict): - Optional. Contains the metadata information that will be stored in the Artifact. + Optional. Contains the metadata information that will be stored in the + Artifact. base_artifact_schema overrides this parameter. state (google.cloud.gapic.types.Artifact.State): Optional. The state of this Artifact. This is a property of the Artifact, and does not imply or @@ -294,6 +300,7 @@ def create( managed by clients (such as Vertex AI Pipelines), and the system does not prescribe or check the validity of state transitions. + base_artifact_schema overrides this parameter. metadata_store_id (str): Optional. The portion of the resource name with the format: @@ -314,6 +321,9 @@ def create( Returns: Artifact: Instantiated representation of the managed Metadata Artifact. + + Raises: + ValueError: If neither schema_title nor base_artifact_schema is provided. """ if base_artifact_schema: return cls._create( @@ -330,6 +340,10 @@ def create( location=location, credentials=credentials, ) + if not schema_title: + raise ValueError( + "One of schema_title or base_artifact_schema should be provided." 
+ ) return cls._create( resource_id=resource_id, diff --git a/google/cloud/aiplatform/metadata/execution.py b/google/cloud/aiplatform/metadata/execution.py index 4536374ac5..a8f8fb8dba 100644 --- a/google/cloud/aiplatform/metadata/execution.py +++ b/google/cloud/aiplatform/metadata/execution.py @@ -31,7 +31,7 @@ from google.cloud.aiplatform.metadata import artifact from google.cloud.aiplatform.metadata import metadata_store from google.cloud.aiplatform.metadata import resource -from google.cloud.aiplatform.metadata.types import base_execution +from google.cloud.aiplatform.metadata.schema import base_execution class Execution(resource._Resource): diff --git a/google/cloud/aiplatform/metadata/metadata.py b/google/cloud/aiplatform/metadata/metadata.py index 6ec675303f..1a00b39612 100644 --- a/google/cloud/aiplatform/metadata/metadata.py +++ b/google/cloud/aiplatform/metadata/metadata.py @@ -32,7 +32,7 @@ from google.cloud.aiplatform.metadata import experiment_resources from google.cloud.aiplatform.metadata import experiment_run_resource from google.cloud.aiplatform.tensorboard import tensorboard_resource -from google.cloud.aiplatform.metadata.types import base_execution +from google.cloud.aiplatform.metadata.schema import base_execution _LOGGER = base.Logger(__name__) diff --git a/google/cloud/aiplatform/metadata/types/base_artifact.py b/google/cloud/aiplatform/metadata/schema/base_artifact.py similarity index 98% rename from google/cloud/aiplatform/metadata/types/base_artifact.py rename to google/cloud/aiplatform/metadata/schema/base_artifact.py index 9b33f50c15..949236dd18 100644 --- a/google/cloud/aiplatform/metadata/types/base_artifact.py +++ b/google/cloud/aiplatform/metadata/schema/base_artifact.py @@ -86,6 +86,7 @@ def __init__( self.resource_id = None if resource_name: # Temporary work around while Artifact.create takes resource_id instead of resource_name + # TODO: switch to using resouce_name only when create resouce supports it. self.resource_id = resource_name.split("/")[-1] self.uri = uri diff --git a/google/cloud/aiplatform/metadata/types/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py similarity index 98% rename from google/cloud/aiplatform/metadata/types/base_execution.py rename to google/cloud/aiplatform/metadata/schema/base_execution.py index 8c34e9ef1d..b394f98f4a 100644 --- a/google/cloud/aiplatform/metadata/types/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -73,8 +73,10 @@ def __init__( self.state = state self.resource_id = None + if resource_name: # Temporary work around while Execution.create takes resource_id instead of resource_name + # TODO: switch to using resouce_name only when create execution supports it. self.resource_id = resource_name.split("/")[-1] self.display_name = display_name diff --git a/google/cloud/aiplatform/metadata/types/google_types.py b/google/cloud/aiplatform/metadata/schema/google_schema.py similarity index 98% rename from google/cloud/aiplatform/metadata/types/google_types.py rename to google/cloud/aiplatform/metadata/schema/google_schema.py index cd3c990555..36b04983de 100644 --- a/google/cloud/aiplatform/metadata/types/google_types.py +++ b/google/cloud/aiplatform/metadata/schema/google_schema.py @@ -14,10 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from typing import Optional, Dict, NamedTuple, List -from dataclasses import dataclass -from google.cloud.aiplatform.metadata.types import base_artifact -from google.cloud.aiplatform.metadata.types import utils +from typing import Optional, Dict +from google.cloud.aiplatform.metadata.schema import base_artifact +from google.cloud.aiplatform.metadata.schema import utils from google.cloud.aiplatform.compat.types import artifact as gca_artifact diff --git a/google/cloud/aiplatform/metadata/types/system_types.py b/google/cloud/aiplatform/metadata/schema/system_schema.py similarity index 97% rename from google/cloud/aiplatform/metadata/types/system_types.py rename to google/cloud/aiplatform/metadata/schema/system_schema.py index b307d1dd5e..fa7b509f94 100644 --- a/google/cloud/aiplatform/metadata/types/system_types.py +++ b/google/cloud/aiplatform/metadata/schema/system_schema.py @@ -14,15 +14,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from typing import Optional, Dict, List +from typing import Optional, Dict from google.cloud.aiplatform.compat.types import execution as gca_execution -from google.cloud.aiplatform.metadata.types import base_artifact -from google.cloud.aiplatform.metadata.types import base_execution +from google.cloud.aiplatform.metadata.schema import base_artifact +from google.cloud.aiplatform.metadata.schema import base_execution from google.cloud.aiplatform.compat.types import artifact as gca_artifact class Model(base_artifact.BaseArtifactSchema): - """Schemaless Artifact Type to store Markdown file.""" + """Schemaless Artifact to store Markdown file.""" SCHEMA_TITLE = "system.Model" @@ -138,7 +138,7 @@ def __init__( class Metrics(base_artifact.BaseArtifactSchema): - """Artifact type for scalar metrics.""" + """Artifact schema for scalar metrics.""" SCHEMA_TITLE = "system.Metrics" @@ -221,7 +221,7 @@ def __init__( class ContainerExecution(base_execution.BaseExecutionSchema): - """Execution type for a container execution.""" + """Execution schema for a container execution.""" SCHEMA_TITLE = "system.ContainerExecution" @@ -269,7 +269,7 @@ def __init__( class ImporterExecution(base_execution.BaseExecutionSchema): - """Execution type for a importer execution.""" + """Execution schema for a importer execution.""" SCHEMA_TITLE = "system.ImporterExecution" @@ -317,7 +317,7 @@ def __init__( class ResolverExecution(base_execution.BaseExecutionSchema): - """Execution type for a resolver execution.""" + """Execution schema for a resolver execution.""" SCHEMA_TITLE = "system.ResolverExecution" @@ -365,7 +365,7 @@ def __init__( class DagExecution(base_execution.BaseExecutionSchema): - """Execution type for a dag execution.""" + """Execution schema for a dag execution.""" SCHEMA_TITLE = "system.DagExecution" @@ -413,7 +413,7 @@ def __init__( class CustomJobExecution(base_execution.BaseExecutionSchema): - """Execution type for a custom job execution.""" + """Execution schema for a custom job execution.""" SCHEMA_TITLE = "system.CustomJobExecution" @@ -461,7 +461,7 @@ def __init__( class Run(base_execution.BaseExecutionSchema): - """Execution type for root run execution.""" + """Execution schema for root run execution.""" SCHEMA_TITLE = "system.Run" diff --git a/google/cloud/aiplatform/metadata/types/utils.py b/google/cloud/aiplatform/metadata/schema/utils.py similarity index 97% rename from google/cloud/aiplatform/metadata/types/utils.py rename to google/cloud/aiplatform/metadata/schema/utils.py index 
90c360bf01..4269f0d10c 100644 --- a/google/cloud/aiplatform/metadata/types/utils.py +++ b/google/cloud/aiplatform/metadata/schema/utils.py @@ -14,11 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from collections import namedtuple -from typing import Optional, Dict, NamedTuple, List +from typing import Optional, Dict, List from dataclasses import dataclass -from google.cloud.aiplatform.metadata import artifact -from itertools import zip_longest @dataclass diff --git a/tests/unit/aiplatform/test_metadata_schema_types.py b/tests/unit/aiplatform/test_metadata_schema_types.py index d207fffca0..b5f9cfefe8 100644 --- a/tests/unit/aiplatform/test_metadata_schema_types.py +++ b/tests/unit/aiplatform/test_metadata_schema_types.py @@ -24,11 +24,11 @@ from google.cloud import aiplatform from google.cloud.aiplatform import initializer from google.cloud.aiplatform.metadata import metadata -from google.cloud.aiplatform.metadata.types import base_artifact -from google.cloud.aiplatform.metadata.types import base_execution -from google.cloud.aiplatform.metadata.types import google_types -from google.cloud.aiplatform.metadata.types import system_types -from google.cloud.aiplatform.metadata.types import utils +from google.cloud.aiplatform.metadata.schema import base_artifact +from google.cloud.aiplatform.metadata.schema import base_execution +from google.cloud.aiplatform.metadata.schema import google_schema +from google.cloud.aiplatform.metadata.schema import system_schema +from google.cloud.aiplatform.metadata.schema import utils from google.cloud.aiplatform.compat.types import artifact as gca_artifact from google.cloud.aiplatform.compat.types import execution as gca_execution @@ -273,7 +273,7 @@ def test_start_execution_method_calls_gapic_library_with_correct_parameters( assert kwargs["execution"].metadata == _TEST_UPDATED_METADATA -class TestMetadataGoogleTypes: +class TestMetadataGoogleSchema: def setup_method(self): reload(initializer) reload(metadata) @@ -283,15 +283,15 @@ def teardown_method(self): initializer.global_pool.shutdown(wait=True) def test_vertex_dataset_schema_title_is_set_correctly(self): - artifact = google_types.VertexDataset() + artifact = google_schema.VertexDataset() assert artifact.schema_title == "google.VertexDataset" def test_vertex_dataset_resouce_name_is_set_in_metadata(self): - artifact = google_types.VertexDataset(dataset_name=_TEST_ARTIFACT_NAME) + artifact = google_schema.VertexDataset(dataset_name=_TEST_ARTIFACT_NAME) assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME def test_vertex_dataset_constructor_parameters_are_set_correctly(self): - artifact = google_types.VertexDataset( + artifact = google_schema.VertexDataset( dataset_name=_TEST_ARTIFACT_NAME, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, @@ -306,15 +306,15 @@ def test_vertex_dataset_constructor_parameters_are_set_correctly(self): assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_vertex_model_schema_title_is_set_correctly(self): - artifact = google_types.VertexModel() + artifact = google_schema.VertexModel() assert artifact.schema_title == "google.VertexModel" def test_vertex_model_resouce_name_is_set_in_metadata(self): - artifact = google_types.VertexModel(vertex_model_name=_TEST_ARTIFACT_NAME) + artifact = google_schema.VertexModel(vertex_model_name=_TEST_ARTIFACT_NAME) assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME def test_vertex_model_constructor_parameters_are_set_correctly(self): - artifact = 
google_types.VertexModel( + artifact = google_schema.VertexModel( vertex_model_name=_TEST_ARTIFACT_NAME, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, @@ -329,15 +329,17 @@ def test_vertex_model_constructor_parameters_are_set_correctly(self): assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_vertex_endpoint_schema_title_is_set_correctly(self): - artifact = google_types.VertexEndpoint() + artifact = google_schema.VertexEndpoint() assert artifact.schema_title == "google.VertexEndpoint" def test_vertex_endpoint_resouce_name_is_set_in_metadata(self): - artifact = google_types.VertexEndpoint(vertex_endpoint_name=_TEST_ARTIFACT_NAME) + artifact = google_schema.VertexEndpoint( + vertex_endpoint_name=_TEST_ARTIFACT_NAME + ) assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME def test_vertex_endpoint_constructor_parameters_are_set_correctly(self): - artifact = google_types.VertexEndpoint( + artifact = google_schema.VertexEndpoint( vertex_endpoint_name=_TEST_ARTIFACT_NAME, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, @@ -361,7 +363,7 @@ def test_unmanaged_container_model_title_is_set_correctly(self): container_spec = utils.ContainerSpec( image_uri="gcr.io/test_container_image_uri" ) - artifact = google_types.UnmanagedContainerModel( + artifact = google_schema.UnmanagedContainerModel( predict_schema_ta=predict_schema_ta, container_spec=container_spec, ) @@ -377,7 +379,7 @@ def test_unmanaged_container_model_resouce_name_is_set_in_metadata(self): container_spec = utils.ContainerSpec( image_uri="gcr.io/test_container_image_uri" ) - artifact = google_types.UnmanagedContainerModel( + artifact = google_schema.UnmanagedContainerModel( predict_schema_ta=predict_schema_ta, container_spec=container_spec, unmanaged_container_model_name=_TEST_ARTIFACT_NAME, @@ -395,7 +397,7 @@ def test_unmanaged_container_model_constructor_parameters_are_set_correctly(self image_uri="gcr.io/test_container_image_uri" ) - artifact = google_types.UnmanagedContainerModel( + artifact = google_schema.UnmanagedContainerModel( predict_schema_ta=predict_schema_ta, container_spec=container_spec, unmanaged_container_model_name=_TEST_ARTIFACT_NAME, @@ -412,7 +414,7 @@ def test_unmanaged_container_model_constructor_parameters_are_set_correctly(self assert artifact.schema_version == _TEST_SCHEMA_VERSION -class TestMetadataSystemTypes: +class TestMetadataSystemSchema: def setup_method(self): reload(initializer) reload(metadata) @@ -422,15 +424,15 @@ def teardown_method(self): initializer.global_pool.shutdown(wait=True) def test_system_dataset_schema_title_is_set_correctly(self): - artifact = system_types.Dataset() + artifact = system_schema.Dataset() assert artifact.schema_title == "system.Dataset" def test_system_dataset_resouce_name_is_set_in_metadata(self): - artifact = system_types.Dataset(dataset_name=_TEST_ARTIFACT_NAME) + artifact = system_schema.Dataset(dataset_name=_TEST_ARTIFACT_NAME) assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME def test_system_dataset_constructor_parameters_are_set_correctly(self): - artifact = system_types.Dataset( + artifact = system_schema.Dataset( dataset_name=_TEST_ARTIFACT_NAME, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, @@ -445,15 +447,15 @@ def test_system_dataset_constructor_parameters_are_set_correctly(self): assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_system_model_schema_title_is_set_correctly(self): - artifact = system_types.Model() + artifact = system_schema.Model() assert artifact.schema_title == "system.Model" def 
test_system_model_resouce_name_is_set_in_metadata(self): - artifact = system_types.Model(model_name=_TEST_ARTIFACT_NAME) + artifact = system_schema.Model(model_name=_TEST_ARTIFACT_NAME) assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME def test_system_model_constructor_parameters_are_set_correctly(self): - artifact = system_types.Model( + artifact = system_schema.Model( model_name=_TEST_ARTIFACT_NAME, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, @@ -468,15 +470,15 @@ def test_system_model_constructor_parameters_are_set_correctly(self): assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_system_metrics_schema_title_is_set_correctly(self): - artifact = system_types.Metrics() + artifact = system_schema.Metrics() assert artifact.schema_title == "system.Metrics" def test_system_metrics_resouce_name_is_set_in_metadata(self): - artifact = system_types.Metrics(metrics_name=_TEST_ARTIFACT_NAME) + artifact = system_schema.Metrics(metrics_name=_TEST_ARTIFACT_NAME) assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME def test_system_metrics_constructor_parameters_are_set_correctly(self): - artifact = system_types.Metrics( + artifact = system_schema.Metrics( metrics_name=_TEST_ARTIFACT_NAME, accuracy=0.1, precision=0.2, @@ -503,27 +505,27 @@ def test_system_metrics_constructor_parameters_are_set_correctly(self): # Test system.Execution Schemas def test_system_container_execution_schema_title_is_set_correctly(self): - execution = system_types.ContainerExecution() + execution = system_schema.ContainerExecution() assert execution.schema_title == "system.ContainerExecution" def test_system_importer_execution_schema_title_is_set_correctly(self): - execution = system_types.ImporterExecution() + execution = system_schema.ImporterExecution() assert execution.schema_title == "system.ImporterExecution" def test_system_resolver_execution_schema_title_is_set_correctly(self): - execution = system_types.ResolverExecution() + execution = system_schema.ResolverExecution() assert execution.schema_title == "system.ResolverExecution" def test_system_dag_execution_schema_title_is_set_correctly(self): - execution = system_types.DagExecution() + execution = system_schema.DagExecution() assert execution.schema_title == "system.DagExecution" def test_system_custom_job_execution_schema_title_is_set_correctly(self): - execution = system_types.CustomJobExecution() + execution = system_schema.CustomJobExecution() assert execution.schema_title == "system.CustomJobExecution" def test_system_run_execution_schema_title_is_set_correctly(self): - execution = system_types.Run() + execution = system_schema.Run() assert execution.schema_title == "system.Run" From 743a97131ed2e5c9d096888181b0c78b4a4f90c5 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 09:37:03 +0000 Subject: [PATCH 14/48] Add e2e integraton tests and lint update --- google/cloud/aiplatform/metadata/artifact.py | 1 - .../metadata/schema/base_execution.py | 2 +- .../aiplatform/test_e2e_metadata_schema.py | 171 ++++++++++++++++++ ...xperiments.py => test_experiments copy.py} | 0 4 files changed, 172 insertions(+), 2 deletions(-) create mode 100644 tests/system/aiplatform/test_e2e_metadata_schema.py rename tests/system/aiplatform/{test_experiments.py => test_experiments copy.py} (100%) diff --git a/google/cloud/aiplatform/metadata/artifact.py b/google/cloud/aiplatform/metadata/artifact.py index 78da1abe51..dd12223822 100644 --- a/google/cloud/aiplatform/metadata/artifact.py +++ 
b/google/cloud/aiplatform/metadata/artifact.py @@ -15,7 +15,6 @@ # limitations under the License. # -from multiprocessing.sharedctypes import Value from typing import Optional, Dict, Union import proto diff --git a/google/cloud/aiplatform/metadata/schema/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py index b394f98f4a..2a6ec02bef 100644 --- a/google/cloud/aiplatform/metadata/schema/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -73,7 +73,7 @@ def __init__( self.state = state self.resource_id = None - + if resource_name: # Temporary work around while Execution.create takes resource_id instead of resource_name # TODO: switch to using resouce_name only when create execution supports it. diff --git a/tests/system/aiplatform/test_e2e_metadata_schema.py b/tests/system/aiplatform/test_e2e_metadata_schema.py new file mode 100644 index 0000000000..60a3894b73 --- /dev/null +++ b/tests/system/aiplatform/test_e2e_metadata_schema.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import pytest + +from google.cloud import aiplatform +from tests.system.aiplatform import e2e_base +from google.cloud.aiplatform.metadata.schema import google_schema +from google.cloud.aiplatform.metadata.schema import system_schema +from google.cloud.aiplatform.metadata.schema import base_artifact +from google.cloud.aiplatform.metadata.schema import base_execution +import json + + +@pytest.mark.usefixtures("tear_down_resources") +class TestMetadataSchema(e2e_base.TestEndToEnd): + def test_artifact_creation_using_schema_base_class(self): + + # Truncating the name because of resource id constraints from the service + artifact_display_name = self._make_display_name("base-artifact")[:30] + artifact_uri = self._make_display_name("base-uri") + artifact_metadata = {"test_property": "test_value"} + artifact_description = self._make_display_name("base-description") + + aiplatform.init( + project=e2e_base._PROJECT, + location=e2e_base._LOCATION, + ) + + artifact = base_artifact.BaseArtifactSchema( + display_name=artifact_display_name, + uri=artifact_uri, + metadata=artifact_metadata, + description=artifact_description, + ).create() + + assert artifact.display_name == artifact_display_name + assert json.dumps(artifact.metadata) == json.dumps(artifact_metadata) + assert artifact.schema_title == "system.Artifact" + assert artifact.description == artifact_description + assert "/metadataStores/default/artifacts/" in artifact.resource_name + + def test_system_dataset_artifact_create(self): + + # Truncating the name because of resource id constraints from the service + artifact_display_name = self._make_display_name("dataset-artifact")[:30] + artifact_uri = self._make_display_name("dataset-uri") + artifact_metadata = {"test_property": "test_value"} + artifact_description = self._make_display_name("dataset-description") + + aiplatform.init( + project=e2e_base._PROJECT, + location=e2e_base._LOCATION, + ) 
+ + artifact = system_schema.Dataset( + display_name=artifact_display_name, + uri=artifact_uri, + metadata=artifact_metadata, + description=artifact_description, + ).create() + + assert artifact.display_name == artifact_display_name + assert json.dumps(artifact.metadata) == json.dumps(artifact_metadata) + assert artifact.schema_title == "system.Dataset" + assert artifact.description == artifact_description + assert "/metadataStores/default/artifacts/" in artifact.resource_name + + def test_google_dataset_artifact_create(self): + + # Truncating the name because of resource id constraints from the service + artifact_display_name = self._make_display_name("ds-artifact")[:30] + artifact_uri = self._make_display_name("vertex-dataset-uri") + artifact_metadata = {"test_property": "test_value"} + artifact_description = self._make_display_name("vertex-dataset-description") + dataset_name = f"projects/{e2e_base._PROJECT}/locations/{e2e_base._LOCATION}/datasets/{artifact_display_name}" + + aiplatform.init( + project=e2e_base._PROJECT, + location=e2e_base._LOCATION, + ) + + artifact = google_schema.VertexDataset( + dataset_name=dataset_name, + display_name=artifact_display_name, + uri=artifact_uri, + metadata=artifact_metadata, + description=artifact_description, + ).create() + expected_metadata = artifact_metadata + expected_metadata["resourceName"] = dataset_name + + assert artifact.display_name == artifact_display_name + assert json.dumps(artifact.metadata) == json.dumps(expected_metadata) + assert artifact.schema_title == "google.VertexDataset" + assert artifact.description == artifact_description + assert "/metadataStores/default/artifacts/" in artifact.resource_name + + def test_execution_create_using_schema_base_class(self): + + # Truncating the name because of resource id constraints from the service + execution_display_name = self._make_display_name("base-execution")[:30] + execution_description = self._make_display_name("base-description") + + aiplatform.init( + project=e2e_base._PROJECT, + location=e2e_base._LOCATION, + ) + + execution = base_execution.BaseExecutionSchema( + display_name=execution_display_name, + description=execution_description, + ).create() + + assert execution.display_name == execution_display_name + assert execution.schema_title == "system.ContainerExecution" + assert execution.description == execution_description + assert "/metadataStores/default/executions/" in execution.resource_name + + def test_execution_create_using_system_schema_class(self): + # Truncating the name because of resource id constraints from the service + execution_display_name = self._make_display_name("base-execution")[:30] + execution_description = self._make_display_name("base-description") + + aiplatform.init( + project=e2e_base._PROJECT, + location=e2e_base._LOCATION, + ) + + execution = system_schema.CustomJobExecution( + display_name=execution_display_name, + description=execution_description, + ).create() + + assert execution.display_name == execution_display_name + assert execution.schema_title == "system.CustomJobExecution" + assert execution.description == execution_description + assert "/metadataStores/default/executions/" in execution.resource_name + + def test_execution_start_execution_using_system_schema_class(self): + # Truncating the name because of resource id constraints from the service + execution_display_name = self._make_display_name("base-execution")[:30] + execution_description = self._make_display_name("base-description") + + aiplatform.init( + project=e2e_base._PROJECT, + 
location=e2e_base._LOCATION, + ) + + execution = system_schema.ContainerExecution( + display_name=execution_display_name, + description=execution_description, + ).start_execution() + + assert execution.display_name == execution_display_name + assert execution.schema_title == "system.ContainerExecution" + assert execution.description == execution_description + assert "/metadataStores/default/executions/" in execution.resource_name diff --git a/tests/system/aiplatform/test_experiments.py b/tests/system/aiplatform/test_experiments copy.py similarity index 100% rename from tests/system/aiplatform/test_experiments.py rename to tests/system/aiplatform/test_experiments copy.py From edaf3e62c31860d0d75ce9f37b65caa9550439a8 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 09:18:47 -0700 Subject: [PATCH 15/48] Update google/cloud/aiplatform/metadata/artifact.py Co-authored-by: sasha-gitg <44654632+sasha-gitg@users.noreply.github.com> --- google/cloud/aiplatform/metadata/artifact.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/aiplatform/metadata/artifact.py b/google/cloud/aiplatform/metadata/artifact.py index dd12223822..33705fe76e 100644 --- a/google/cloud/aiplatform/metadata/artifact.py +++ b/google/cloud/aiplatform/metadata/artifact.py @@ -314,7 +314,7 @@ def create( credentials (auth_credentials.Credentials): Optional. Custom credentials used to create this Artifact. Overrides credentials set in aiplatform.init. - base_artifact_schema (BaseArtifactType): + base_artifact_schema (BaseArtifactSchema): Optional. An instance of the BaseArtifactType class that can be provided instead of providing artifact specific parameters. It overrides the values provided for schema_title, resource_id, uri, display_name, schema_version, description, and metadata. 
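The integration tests above reduce to the following round trip; a compressed sketch with placeholder project, bucket, and display names:

from google.cloud import aiplatform
from google.cloud.aiplatform.metadata.schema import system_schema

aiplatform.init(project="my-project", location="us-central1")  # placeholder project

created = system_schema.Dataset(
    display_name="e2e-dataset",
    uri="gs://my-bucket/e2e.csv",  # placeholder URI
    metadata={"test_property": "test_value"},
    description="dataset produced by the round-trip sketch",
).create()

# The managed artifact reports the schema title fixed by the subclass and lives
# in the default metadata store.
assert created.schema_title == "system.Dataset"
assert "/metadataStores/default/artifacts/" in created.resource_name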
From fb18610ca6e2ac2c4773e7d624dbe9ed2ac1eb41 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 16:43:40 +0000 Subject: [PATCH 16/48] remove the duplicate test_experiments_copy.py --- .../aiplatform/{test_experiments copy.py => test_experiments.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/system/aiplatform/{test_experiments copy.py => test_experiments.py} (100%) diff --git a/tests/system/aiplatform/test_experiments copy.py b/tests/system/aiplatform/test_experiments.py similarity index 100% rename from tests/system/aiplatform/test_experiments copy.py rename to tests/system/aiplatform/test_experiments.py From f6557416ccf0a001d85a459a7f79d2c59dbe9d3e Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 18:35:34 +0000 Subject: [PATCH 17/48] refactor based on code review feedback --- google/cloud/aiplatform/metadata/artifact.py | 105 ++-- .../metadata/schema/base_artifact.py | 8 +- .../metadata/schema/base_execution.py | 4 +- ...le_schema.py => google_artifact_schema.py} | 5 +- .../metadata/schema/system_artifact_schema.py | 220 ++++++++ .../schema/system_execution_schema.py | 165 ++++++ .../metadata/schema/system_schema.py | 508 ------------------ .../aiplatform/test_e2e_metadata_schema.py | 19 +- ...chema_types.py => test_metadata_schema.py} | 78 ++- 9 files changed, 502 insertions(+), 610 deletions(-) rename google/cloud/aiplatform/metadata/schema/{google_schema.py => google_artifact_schema.py} (99%) create mode 100644 google/cloud/aiplatform/metadata/schema/system_artifact_schema.py create mode 100644 google/cloud/aiplatform/metadata/schema/system_execution_schema.py delete mode 100644 google/cloud/aiplatform/metadata/schema/system_schema.py rename tests/unit/aiplatform/{test_metadata_schema_types.py => test_metadata_schema.py} (91%) diff --git a/google/cloud/aiplatform/metadata/artifact.py b/google/cloud/aiplatform/metadata/artifact.py index 33705fe76e..fb1469d667 100644 --- a/google/cloud/aiplatform/metadata/artifact.py +++ b/google/cloud/aiplatform/metadata/artifact.py @@ -15,13 +15,13 @@ # limitations under the License. # -from typing import Optional, Dict, Union - import proto +from typing import Optional, Dict, Union from google.auth import credentials as auth_credentials from google.cloud.aiplatform import base + from google.cloud.aiplatform import models from google.cloud.aiplatform import utils from google.cloud.aiplatform.compat.types import artifact as gca_artifact @@ -31,8 +31,9 @@ from google.cloud.aiplatform.metadata import metadata_store from google.cloud.aiplatform.metadata import resource from google.cloud.aiplatform.metadata import utils as metadata_utils -from google.cloud.aiplatform.utils import rest_utils + from google.cloud.aiplatform.metadata.schema import base_artifact +from google.cloud.aiplatform.utils import rest_utils _LOGGER = base.Logger(__name__) @@ -249,8 +250,8 @@ def _list_resources( @classmethod def create( cls, + schema_title: str, *, - schema_title: Optional[str] = None, resource_id: Optional[str] = None, uri: Optional[str] = None, display_name: Optional[str] = None, @@ -262,36 +263,28 @@ def create( project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, - base_artifact_schema: Optional[base_artifact.BaseArtifactSchema] = None, ) -> "Artifact": """Creates a new Metadata Artifact. - Args: schema_title (str): - Optional. schema_title identifies the schema title used by the - Artifact. 
One of schema_title or base_artifact_schema needs to - be set. base_artifact_schema overrides this parameter. + Required. schema_title identifies the schema title used by the Artifact. Please reference https://cloud.google.com/vertex-ai/docs/ml-metadata/system-schemas. resource_id (str): Optional. The portion of the Artifact name with - the format. base_artifact_schema overrides this parameter. This - is globally unique in a metadataStore: + the format. This is globally unique in a metadataStore: projects/123/locations/us-central1/metadataStores//artifacts/. uri (str): Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual - artifact file. base_artifact_schema overrides this parameter. + artifact file. display_name (str): - Optional. The user-defined name of the Artifact. base_artifact_schema overrides this parameter. + Optional. The user-defined name of the Artifact. schema_version (str): Optional. schema_version specifies the version used by the Artifact. If not set, defaults to use the latest version. - base_artifact_schema overrides this parameter. description (str): Optional. Describes the purpose of the Artifact to be created. - base_artifact_schema overrides this parameter. metadata (Dict): - Optional. Contains the metadata information that will be stored in the - Artifact. base_artifact_schema overrides this parameter. + Optional. Contains the metadata information that will be stored in the Artifact. state (google.cloud.gapic.types.Artifact.State): Optional. The state of this Artifact. This is a property of the Artifact, and does not imply or @@ -299,7 +292,6 @@ def create( managed by clients (such as Vertex AI Pipelines), and the system does not prescribe or check the validity of state transitions. - base_artifact_schema overrides this parameter. metadata_store_id (str): Optional. The portion of the resource name with the format: @@ -314,36 +306,9 @@ def create( credentials (auth_credentials.Credentials): Optional. Custom credentials used to create this Artifact. Overrides credentials set in aiplatform.init. - base_artifact_schema (BaseArtifactSchema): - Optional. An instance of the BaseArtifactType class that can be provided instead of providing artifact specific parameters. It overrides - the values provided for schema_title, resource_id, uri, display_name, schema_version, description, and metadata. - Returns: Artifact: Instantiated representation of the managed Metadata Artifact. - - Raises: - ValueError: If neither schema_title nor base_artifact_schema is provided. """ - if base_artifact_schema: - return cls._create( - resource_id=base_artifact_schema.resource_id, - schema_title=base_artifact_schema.schema_title, - uri=base_artifact_schema.uri, - display_name=base_artifact_schema.display_name, - schema_version=base_artifact_schema.schema_version, - description=base_artifact_schema.description, - metadata=base_artifact_schema.metadata, - state=base_artifact_schema.state, - metadata_store_id=metadata_store_id, - project=project, - location=location, - credentials=credentials, - ) - if not schema_title: - raise ValueError( - "One of schema_title or base_artifact_schema should be provided." 
- ) - return cls._create( resource_id=resource_id, schema_title=schema_title, @@ -359,6 +324,56 @@ def create( credentials=credentials, ) + @classmethod + def create_from_base_schema( + cls, + *, + base_artifact_schema: base_artifact.BaseArtifactSchema, + metadata_store_id: Optional[str] = "default", + project: Optional[str] = None, + location: Optional[str] = None, + credentials: Optional[auth_credentials.Credentials] = None, + ) -> "Artifact": + """Creates a new Metadata Artifact from a BaseArtifactSchema class instance. + + Args: + base_artifact_schema (BaseArtifactSchema): + Required. An instance of the BaseArtifactType class that can be provided instead of providing artifact specific parameters. It overrides + the values provided for schema_title, resource_id, uri, display_name, schema_version, description, and metadata. + metadata_store_id (str): + Optional. The portion of the resource name with + the format: + projects/123/locations/us-central1/metadataStores//artifacts/ + If not provided, the MetadataStore's ID will be set to "default". + project (str): + Optional. Project used to create this Artifact. Overrides project set in + aiplatform.init. + location (str): + Optional. Location used to create this Artifact. Overrides location set in + aiplatform.init. + credentials (auth_credentials.Credentials): + Optional. Custom credentials used to create this Artifact. Overrides + credentials set in aiplatform.init. + + Returns: + Artifact: Instantiated representation of the managed Metadata Artifact. + """ + + return cls._create( + resource_id=base_artifact_schema.resource_id, + schema_title=base_artifact_schema.schema_title, + uri=base_artifact_schema.uri, + display_name=base_artifact_schema.display_name, + schema_version=base_artifact_schema.schema_version, + description=base_artifact_schema.description, + metadata=base_artifact_schema.metadata, + state=base_artifact_schema.state, + metadata_store_id=metadata_store_id, + project=project, + location=location, + credentials=credentials, + ) + @property def uri(self) -> Optional[str]: "Uri for this Artifact." diff --git a/google/cloud/aiplatform/metadata/schema/base_artifact.py b/google/cloud/aiplatform/metadata/schema/base_artifact.py index 949236dd18..26545bba0e 100644 --- a/google/cloud/aiplatform/metadata/schema/base_artifact.py +++ b/google/cloud/aiplatform/metadata/schema/base_artifact.py @@ -15,11 +15,13 @@ # limitations under the License. # +from typing import Optional, Dict + from google.auth import credentials as auth_credentials + +from google.cloud.aiplatform.compat.types import artifact as gca_artifact from google.cloud.aiplatform.metadata import artifact from google.cloud.aiplatform.metadata import constants -from google.cloud.aiplatform.compat.types import artifact as gca_artifact -from typing import Optional, Dict class BaseArtifactSchema(object): @@ -124,7 +126,7 @@ def create( Returns: Artifact: Instantiated representation of the managed Metadata Artifact. """ - self.artifact = artifact.Artifact.create( + self.artifact = artifact.Artifact.create_from_base_schema( base_artifact_schema=self, metadata_store_id=metadata_store_id, project=project, diff --git a/google/cloud/aiplatform/metadata/schema/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py index 2a6ec02bef..c237c0d56a 100644 --- a/google/cloud/aiplatform/metadata/schema/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -15,12 +15,14 @@ # limitations under the License. 
# +from typing import Optional, Dict + from google.auth import credentials as auth_credentials + from google.cloud.aiplatform.compat.types import execution as gca_execution from google.cloud.aiplatform.metadata import constants from google.cloud.aiplatform.metadata import execution from google.cloud.aiplatform.metadata import metadata -from typing import Optional, Dict class BaseExecutionSchema(object): diff --git a/google/cloud/aiplatform/metadata/schema/google_schema.py b/google/cloud/aiplatform/metadata/schema/google_artifact_schema.py similarity index 99% rename from google/cloud/aiplatform/metadata/schema/google_schema.py rename to google/cloud/aiplatform/metadata/schema/google_artifact_schema.py index 36b04983de..7dd2fa088c 100644 --- a/google/cloud/aiplatform/metadata/schema/google_schema.py +++ b/google/cloud/aiplatform/metadata/schema/google_artifact_schema.py @@ -13,11 +13,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -# + from typing import Optional, Dict + +from google.cloud.aiplatform.compat.types import artifact as gca_artifact from google.cloud.aiplatform.metadata.schema import base_artifact from google.cloud.aiplatform.metadata.schema import utils -from google.cloud.aiplatform.compat.types import artifact as gca_artifact class VertexDataset(base_artifact.BaseArtifactSchema): diff --git a/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py b/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py new file mode 100644 index 0000000000..e24c7e7042 --- /dev/null +++ b/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Optional, Dict + +from google.cloud.aiplatform.compat.types import artifact as gca_artifact +from google.cloud.aiplatform.metadata.schema import base_artifact + + +class Model(base_artifact.BaseArtifactSchema): + """Schemaless Artifact to store Markdown file.""" + + SCHEMA_TITLE = "system.Model" + + def __init__( + self, + model_name: Optional[str] = None, + uri: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + description: Optional[str] = None, + metadata: Optional[Dict] = None, + state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, + **kwargs, + ): + """Args: + model_name (str): + Optional. The resource name of the Artifact following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. + uri (str): + Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual + artifact file. + display_name (str): + Optional. The user-defined name of the base. + schema_version (str): + Optional. schema_version specifies the version used by the base. 
+ If not set, defaults to use the latest version. + description (str): + Optional. Describes the purpose of the Artifact to be created. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Artifact. + state (google.cloud.gapic.types.Artifact.State): + Optional. The state of this Artifact. This is a + property of the Artifact, and does not imply or + capture any ongoing process. This property is + managed by clients (such as Vertex AI + Pipelines), and the system does not prescribe or + check the validity of state transitions. + **kwargs: + Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. + """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = model_name + super(Model, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=model_name, + uri=uri, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + state=state, + kwargs=kwargs, + ) + + +class Dataset(base_artifact.BaseArtifactSchema): + """An artifact representing a system Dataset.""" + + SCHEMA_TITLE = "system.Dataset" + + def __init__( + self, + dataset_name: Optional[str] = None, + uri: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + description: Optional[str] = None, + metadata: Optional[Dict] = None, + state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, + **kwargs, + ): + """Args: + dataset_name (str): + Optional. The resource name of the Artifact following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. + uri (str): + Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual + artifact file. + display_name (str): + Optional. The user-defined name of the base. + schema_version (str): + Optional. schema_version specifies the version used by the base. + If not set, defaults to use the latest version. + description (str): + Optional. Describes the purpose of the Artifact to be created. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Artifact. + state (google.cloud.gapic.types.Artifact.State): + Optional. The state of this Artifact. This is a + property of the Artifact, and does not imply or + capture any ongoing process. This property is + managed by clients (such as Vertex AI + Pipelines), and the system does not prescribe or + check the validity of state transitions. + **kwargs: + Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. 
+ """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = dataset_name + super(Dataset, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=dataset_name, + uri=uri, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + state=state, + kwargs=kwargs, + ) + + +class Metrics(base_artifact.BaseArtifactSchema): + """Artifact schema for scalar metrics.""" + + SCHEMA_TITLE = "system.Metrics" + + def __init__( + self, + metrics_name: Optional[str] = None, + accuracy: Optional[float] = 0, + precision: Optional[float] = 0, + recall: Optional[float] = 0, + f1score: Optional[float] = 0, + mean_absolute_error: Optional[float] = 0, + mean_squared_error: Optional[float] = 0, + uri: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + description: Optional[str] = None, + metadata: Optional[Dict] = None, + state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, + **kwargs, + ): + """Args: + metrics_name (str): + Optional. The resource name of the Artifact following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. + accuracy (float): + Optional. Defaults to zero. + precision (float): + Optional. Defaults to zero. + recall (float): + Optional. Defaults to zero. + f1score (float): + Optional. Defaults to zero. + mean_absolute_error (float): + Optional. Defaults to zero. + mean_squared_error (float): + Optional. Defaults to zero. + uri (str): + Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual + artifact file. + display_name (str): + Optional. The user-defined name of the base. + schema_version (str): + Optional. schema_version specifies the version used by the base. + If not set, defaults to use the latest version. + description (str): + Optional. Describes the purpose of the Artifact to be created. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Artifact. + state (google.cloud.gapic.types.Artifact.State): + Optional. The state of this Artifact. This is a + property of the Artifact, and does not imply or + capture any ongoing process. This property is + managed by clients (such as Vertex AI + Pipelines), and the system does not prescribe or + check the validity of state transitions. + **kwargs: + Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. 
+ """ + extended_metadata = metadata or {} + extended_metadata["accuracy"] = accuracy + extended_metadata["precision"] = precision + extended_metadata["recall"] = recall + extended_metadata["f1score"] = f1score + extended_metadata["mean_absolute_error"] = mean_absolute_error + extended_metadata["mean_squared_error"] = mean_squared_error + extended_metadata["resourceName"] = metrics_name + + super(Metrics, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=metrics_name, + uri=uri, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + state=state, + kwargs=kwargs, + ) diff --git a/google/cloud/aiplatform/metadata/schema/system_execution_schema.py b/google/cloud/aiplatform/metadata/schema/system_execution_schema.py new file mode 100644 index 0000000000..c66dd99fc6 --- /dev/null +++ b/google/cloud/aiplatform/metadata/schema/system_execution_schema.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Optional, Dict + +from google.cloud.aiplatform.compat.types import execution as gca_execution +from google.cloud.aiplatform.metadata.schema import base_execution + + +class ContainerExecution(base_execution.BaseExecutionSchema): + """Execution schema for a container execution.""" + + SCHEMA_TITLE = "system.ContainerExecution" + + def __init__( + self, + state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, + execution_name: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + metadata: Optional[Dict] = None, + description: Optional[str] = None, + **kwargs, + ): + """Args: + state (gca_execution.Execution.State.RUNNING): + Optional. State of this Execution. Defaults to RUNNING. + execution_name (str): + Optional. The resource name of the Execution following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//executions/. + display_name (str): + Optional. The user-defined name of the Execution. + schema_version (str): + Optional. schema_version specifies the version used by the Execution. + If not set, defaults to use the latest version. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Execution. + description (str): + Optional. Describes the purpose of the Execution to be created. + **kwargs: + Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. 
+ """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = execution_name + super(ContainerExecution, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=execution_name, + state=state, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + kwargs=kwargs, + ) + + +class CustomJobExecution(base_execution.BaseExecutionSchema): + """Execution schema for a custom job execution.""" + + SCHEMA_TITLE = "system.CustomJobExecution" + + def __init__( + self, + state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, + execution_name: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + metadata: Optional[Dict] = None, + description: Optional[str] = None, + **kwargs, + ): + """Args: + state (gca_execution.Execution.State.RUNNING): + Optional. State of this Execution. Defaults to RUNNING. + execution_name (str): + Optional. The resource name of the Execution following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//executions/. + display_name (str): + Optional. The user-defined name of the Execution. + schema_version (str): + Optional. schema_version specifies the version used by the Execution. + If not set, defaults to use the latest version. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Execution. + description (str): + Optional. Describes the purpose of the Execution to be created. + **kwargs: + Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. + """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = execution_name + super(CustomJobExecution, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=execution_name, + state=state, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + kwargs=kwargs, + ) + + +class Run(base_execution.BaseExecutionSchema): + """Execution schema for root run execution.""" + + SCHEMA_TITLE = "system.Run" + + def __init__( + self, + state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, + execution_name: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + metadata: Optional[Dict] = None, + description: Optional[str] = None, + **kwargs, + ): + """Args: + state (gca_execution.Execution.State.RUNNING): + Optional. State of this Execution. Defaults to RUNNING. + execution_name (str): + Optional. The resource name of the Execution following the format as follows. + This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//executions/. + display_name (str): + Optional. The user-defined name of the Execution. + schema_version (str): + Optional. schema_version specifies the version used by the Execution. + If not set, defaults to use the latest version. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Execution. + description (str): + Optional. Describes the purpose of the Execution to be created. + **kwargs: + Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. 
+ """ + extended_metadata = metadata or {} + extended_metadata["resourceName"] = execution_name + super(Run, self).__init__( + schema_title=self.SCHEMA_TITLE, + resource_name=execution_name, + state=state, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + kwargs=kwargs, + ) diff --git a/google/cloud/aiplatform/metadata/schema/system_schema.py b/google/cloud/aiplatform/metadata/schema/system_schema.py deleted file mode 100644 index fa7b509f94..0000000000 --- a/google/cloud/aiplatform/metadata/schema/system_schema.py +++ /dev/null @@ -1,508 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Optional, Dict -from google.cloud.aiplatform.compat.types import execution as gca_execution -from google.cloud.aiplatform.metadata.schema import base_artifact -from google.cloud.aiplatform.metadata.schema import base_execution -from google.cloud.aiplatform.compat.types import artifact as gca_artifact - - -class Model(base_artifact.BaseArtifactSchema): - """Schemaless Artifact to store Markdown file.""" - - SCHEMA_TITLE = "system.Model" - - def __init__( - self, - model_name: Optional[str] = None, - uri: Optional[str] = None, - display_name: Optional[str] = None, - schema_version: Optional[str] = None, - description: Optional[str] = None, - metadata: Optional[Dict] = None, - state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, - **kwargs, - ): - """Args: - model_name (str): - Optional. The resource name of the Artifact following the format as follows. - This is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//artifacts/. - uri (str): - Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual - artifact file. - display_name (str): - Optional. The user-defined name of the base. - schema_version (str): - Optional. schema_version specifies the version used by the base. - If not set, defaults to use the latest version. - description (str): - Optional. Describes the purpose of the Artifact to be created. - metadata (Dict): - Optional. Contains the metadata information that will be stored in the Artifact. - state (google.cloud.gapic.types.Artifact.State): - Optional. The state of this Artifact. This is a - property of the Artifact, and does not imply or - capture any ongoing process. This property is - managed by clients (such as Vertex AI - Pipelines), and the system does not prescribe or - check the validity of state transitions. - **kwargs: - Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. 
- """ - extended_metadata = metadata or {} - extended_metadata["resourceName"] = model_name - super(Model, self).__init__( - schema_title=self.SCHEMA_TITLE, - resource_name=model_name, - uri=uri, - display_name=display_name, - schema_version=schema_version, - description=description, - metadata=extended_metadata, - state=state, - kwargs=kwargs, - ) - - -class Dataset(base_artifact.BaseArtifactSchema): - """An artifact representing a system Dataset.""" - - SCHEMA_TITLE = "system.Dataset" - - def __init__( - self, - dataset_name: Optional[str] = None, - uri: Optional[str] = None, - display_name: Optional[str] = None, - schema_version: Optional[str] = None, - description: Optional[str] = None, - metadata: Optional[Dict] = None, - state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, - **kwargs, - ): - """Args: - dataset_name (str): - Optional. The resource name of the Artifact following the format as follows. - This is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//artifacts/. - uri (str): - Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual - artifact file. - display_name (str): - Optional. The user-defined name of the base. - schema_version (str): - Optional. schema_version specifies the version used by the base. - If not set, defaults to use the latest version. - description (str): - Optional. Describes the purpose of the Artifact to be created. - metadata (Dict): - Optional. Contains the metadata information that will be stored in the Artifact. - state (google.cloud.gapic.types.Artifact.State): - Optional. The state of this Artifact. This is a - property of the Artifact, and does not imply or - capture any ongoing process. This property is - managed by clients (such as Vertex AI - Pipelines), and the system does not prescribe or - check the validity of state transitions. - **kwargs: - Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. - """ - extended_metadata = metadata or {} - extended_metadata["resourceName"] = dataset_name - super(Dataset, self).__init__( - schema_title=self.SCHEMA_TITLE, - resource_name=dataset_name, - uri=uri, - display_name=display_name, - schema_version=schema_version, - description=description, - metadata=extended_metadata, - state=state, - kwargs=kwargs, - ) - - -class Metrics(base_artifact.BaseArtifactSchema): - """Artifact schema for scalar metrics.""" - - SCHEMA_TITLE = "system.Metrics" - - def __init__( - self, - metrics_name: Optional[str] = None, - accuracy: Optional[float] = 0, - precision: Optional[float] = 0, - recall: Optional[float] = 0, - f1score: Optional[float] = 0, - mean_absolute_error: Optional[float] = 0, - mean_squared_error: Optional[float] = 0, - uri: Optional[str] = None, - display_name: Optional[str] = None, - schema_version: Optional[str] = None, - description: Optional[str] = None, - metadata: Optional[Dict] = None, - state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, - **kwargs, - ): - """Args: - metrics_name (str): - Optional. The resource name of the Artifact following the format as follows. - This is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//artifacts/. - accuracy (float): - Optional. Defaults to zero. - precision (float): - Optional. Defaults to zero. - recall (float): - Optional. Defaults to zero. - f1score (float): - Optional. Defaults to zero. - mean_absolute_error (float): - Optional. 
Defaults to zero. - mean_squared_error (float): - Optional. Defaults to zero. - uri (str): - Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual - artifact file. - display_name (str): - Optional. The user-defined name of the base. - schema_version (str): - Optional. schema_version specifies the version used by the base. - If not set, defaults to use the latest version. - description (str): - Optional. Describes the purpose of the Artifact to be created. - metadata (Dict): - Optional. Contains the metadata information that will be stored in the Artifact. - state (google.cloud.gapic.types.Artifact.State): - Optional. The state of this Artifact. This is a - property of the Artifact, and does not imply or - capture any ongoing process. This property is - managed by clients (such as Vertex AI - Pipelines), and the system does not prescribe or - check the validity of state transitions. - **kwargs: - Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. - """ - extended_metadata = metadata or {} - extended_metadata["accuracy"] = accuracy - extended_metadata["precision"] = precision - extended_metadata["recall"] = recall - extended_metadata["f1score"] = f1score - extended_metadata["mean_absolute_error"] = mean_absolute_error - extended_metadata["mean_squared_error"] = mean_squared_error - extended_metadata["resourceName"] = metrics_name - - super(Metrics, self).__init__( - schema_title=self.SCHEMA_TITLE, - resource_name=metrics_name, - uri=uri, - display_name=display_name, - schema_version=schema_version, - description=description, - metadata=extended_metadata, - state=state, - kwargs=kwargs, - ) - - -class ContainerExecution(base_execution.BaseExecutionSchema): - """Execution schema for a container execution.""" - - SCHEMA_TITLE = "system.ContainerExecution" - - def __init__( - self, - state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, - execution_name: Optional[str] = None, - display_name: Optional[str] = None, - schema_version: Optional[str] = None, - metadata: Optional[Dict] = None, - description: Optional[str] = None, - **kwargs, - ): - """Args: - state (gca_execution.Execution.State.RUNNING): - Optional. State of this Execution. Defaults to RUNNING. - execution_name (str): - Optional. The resource name of the Execution following the format as follows. - This is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//executions/. - display_name (str): - Optional. The user-defined name of the Execution. - schema_version (str): - Optional. schema_version specifies the version used by the Execution. - If not set, defaults to use the latest version. - metadata (Dict): - Optional. Contains the metadata information that will be stored in the Execution. - description (str): - Optional. Describes the purpose of the Execution to be created. - **kwargs: - Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. 
- """ - extended_metadata = metadata or {} - extended_metadata["resourceName"] = execution_name - super(ContainerExecution, self).__init__( - schema_title=self.SCHEMA_TITLE, - resource_name=execution_name, - state=state, - display_name=display_name, - schema_version=schema_version, - description=description, - metadata=extended_metadata, - kwargs=kwargs, - ) - - -class ImporterExecution(base_execution.BaseExecutionSchema): - """Execution schema for a importer execution.""" - - SCHEMA_TITLE = "system.ImporterExecution" - - def __init__( - self, - state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, - execution_name: Optional[str] = None, - display_name: Optional[str] = None, - schema_version: Optional[str] = None, - metadata: Optional[Dict] = None, - description: Optional[str] = None, - **kwargs, - ): - """Args: - state (gca_execution.Execution.State.RUNNING): - Optional. State of this Execution. Defaults to RUNNING. - execution_name (str): - Optional. The resource name of the Execution following the format as follows. - This is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//executions/. - display_name (str): - Optional. The user-defined name of the Execution. - schema_version (str): - Optional. schema_version specifies the version used by the Execution. - If not set, defaults to use the latest version. - metadata (Dict): - Optional. Contains the metadata information that will be stored in the Execution. - description (str): - Optional. Describes the purpose of the Execution to be created. - **kwargs: - Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. - """ - extended_metadata = metadata or {} - extended_metadata["resourceName"] = execution_name - super(ImporterExecution, self).__init__( - schema_title=self.SCHEMA_TITLE, - resource_name=execution_name, - state=state, - display_name=display_name, - schema_version=schema_version, - description=description, - metadata=extended_metadata, - kwargs=kwargs, - ) - - -class ResolverExecution(base_execution.BaseExecutionSchema): - """Execution schema for a resolver execution.""" - - SCHEMA_TITLE = "system.ResolverExecution" - - def __init__( - self, - state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, - execution_name: Optional[str] = None, - display_name: Optional[str] = None, - schema_version: Optional[str] = None, - metadata: Optional[Dict] = None, - description: Optional[str] = None, - **kwargs, - ): - """Args: - state (gca_execution.Execution.State.RUNNING): - Optional. State of this Execution. Defaults to RUNNING. - execution_name (str): - Optional. The resource name of the Execution following the format as follows. - This is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//executions/. - display_name (str): - Optional. The user-defined name of the Execution. - schema_version (str): - Optional. schema_version specifies the version used by the Execution. - If not set, defaults to use the latest version. - metadata (Dict): - Optional. Contains the metadata information that will be stored in the Execution. - description (str): - Optional. Describes the purpose of the Execution to be created. - **kwargs: - Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. 
- """ - extended_metadata = metadata or {} - extended_metadata["resourceName"] = execution_name - super(ResolverExecution, self).__init__( - schema_title=self.SCHEMA_TITLE, - resource_name=execution_name, - state=state, - display_name=display_name, - schema_version=schema_version, - description=description, - metadata=extended_metadata, - kwargs=kwargs, - ) - - -class DagExecution(base_execution.BaseExecutionSchema): - """Execution schema for a dag execution.""" - - SCHEMA_TITLE = "system.DagExecution" - - def __init__( - self, - state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, - execution_name: Optional[str] = None, - display_name: Optional[str] = None, - schema_version: Optional[str] = None, - metadata: Optional[Dict] = None, - description: Optional[str] = None, - **kwargs, - ): - """Args: - state (gca_execution.Execution.State.RUNNING): - Optional. State of this Execution. Defaults to RUNNING. - execution_name (str): - Optional. The resource name of the Execution following the format as follows. - This is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//executions/. - display_name (str): - Optional. The user-defined name of the Execution. - schema_version (str): - Optional. schema_version specifies the version used by the Execution. - If not set, defaults to use the latest version. - metadata (Dict): - Optional. Contains the metadata information that will be stored in the Execution. - description (str): - Optional. Describes the purpose of the Execution to be created. - **kwargs: - Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. - """ - extended_metadata = metadata or {} - extended_metadata["resourceName"] = execution_name - super(DagExecution, self).__init__( - schema_title=self.SCHEMA_TITLE, - resource_name=execution_name, - state=state, - display_name=display_name, - schema_version=schema_version, - description=description, - metadata=extended_metadata, - kwargs=kwargs, - ) - - -class CustomJobExecution(base_execution.BaseExecutionSchema): - """Execution schema for a custom job execution.""" - - SCHEMA_TITLE = "system.CustomJobExecution" - - def __init__( - self, - state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, - execution_name: Optional[str] = None, - display_name: Optional[str] = None, - schema_version: Optional[str] = None, - metadata: Optional[Dict] = None, - description: Optional[str] = None, - **kwargs, - ): - """Args: - state (gca_execution.Execution.State.RUNNING): - Optional. State of this Execution. Defaults to RUNNING. - execution_name (str): - Optional. The resource name of the Execution following the format as follows. - This is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//executions/. - display_name (str): - Optional. The user-defined name of the Execution. - schema_version (str): - Optional. schema_version specifies the version used by the Execution. - If not set, defaults to use the latest version. - metadata (Dict): - Optional. Contains the metadata information that will be stored in the Execution. - description (str): - Optional. Describes the purpose of the Execution to be created. - **kwargs: - Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. 
- """ - extended_metadata = metadata or {} - extended_metadata["resourceName"] = execution_name - super(CustomJobExecution, self).__init__( - schema_title=self.SCHEMA_TITLE, - resource_name=execution_name, - state=state, - display_name=display_name, - schema_version=schema_version, - description=description, - metadata=extended_metadata, - kwargs=kwargs, - ) - - -class Run(base_execution.BaseExecutionSchema): - """Execution schema for root run execution.""" - - SCHEMA_TITLE = "system.Run" - - def __init__( - self, - state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, - execution_name: Optional[str] = None, - display_name: Optional[str] = None, - schema_version: Optional[str] = None, - metadata: Optional[Dict] = None, - description: Optional[str] = None, - **kwargs, - ): - """Args: - state (gca_execution.Execution.State.RUNNING): - Optional. State of this Execution. Defaults to RUNNING. - execution_name (str): - Optional. The resource name of the Execution following the format as follows. - This is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//executions/. - display_name (str): - Optional. The user-defined name of the Execution. - schema_version (str): - Optional. schema_version specifies the version used by the Execution. - If not set, defaults to use the latest version. - metadata (Dict): - Optional. Contains the metadata information that will be stored in the Execution. - description (str): - Optional. Describes the purpose of the Execution to be created. - **kwargs: - Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. - """ - extended_metadata = metadata or {} - extended_metadata["resourceName"] = execution_name - super(Run, self).__init__( - schema_title=self.SCHEMA_TITLE, - resource_name=execution_name, - state=state, - display_name=display_name, - schema_version=schema_version, - description=description, - metadata=extended_metadata, - kwargs=kwargs, - ) diff --git a/tests/system/aiplatform/test_e2e_metadata_schema.py b/tests/system/aiplatform/test_e2e_metadata_schema.py index 60a3894b73..f71a45c226 100644 --- a/tests/system/aiplatform/test_e2e_metadata_schema.py +++ b/tests/system/aiplatform/test_e2e_metadata_schema.py @@ -14,15 +14,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json + import pytest from google.cloud import aiplatform -from tests.system.aiplatform import e2e_base -from google.cloud.aiplatform.metadata.schema import google_schema -from google.cloud.aiplatform.metadata.schema import system_schema from google.cloud.aiplatform.metadata.schema import base_artifact from google.cloud.aiplatform.metadata.schema import base_execution -import json +from google.cloud.aiplatform.metadata.schema import google_artifact_schema +from google.cloud.aiplatform.metadata.schema import system_artifact_schema +from google.cloud.aiplatform.metadata.schema import system_execution_schema + +from tests.system.aiplatform import e2e_base @pytest.mark.usefixtures("tear_down_resources") @@ -66,7 +69,7 @@ def test_system_dataset_artifact_create(self): location=e2e_base._LOCATION, ) - artifact = system_schema.Dataset( + artifact = system_artifact_schema.Dataset( display_name=artifact_display_name, uri=artifact_uri, metadata=artifact_metadata, @@ -93,7 +96,7 @@ def test_google_dataset_artifact_create(self): location=e2e_base._LOCATION, ) - artifact = google_schema.VertexDataset( + artifact = google_artifact_schema.VertexDataset( dataset_name=dataset_name, display_name=artifact_display_name, uri=artifact_uri, @@ -140,7 +143,7 @@ def test_execution_create_using_system_schema_class(self): location=e2e_base._LOCATION, ) - execution = system_schema.CustomJobExecution( + execution = system_execution_schema.CustomJobExecution( display_name=execution_display_name, description=execution_description, ).create() @@ -160,7 +163,7 @@ def test_execution_start_execution_using_system_schema_class(self): location=e2e_base._LOCATION, ) - execution = system_schema.ContainerExecution( + execution = system_execution_schema.ContainerExecution( display_name=execution_display_name, description=execution_description, ).start_execution() diff --git a/tests/unit/aiplatform/test_metadata_schema_types.py b/tests/unit/aiplatform/test_metadata_schema.py similarity index 91% rename from tests/unit/aiplatform/test_metadata_schema_types.py rename to tests/unit/aiplatform/test_metadata_schema.py index b5f9cfefe8..a07efbfb4f 100644 --- a/tests/unit/aiplatform/test_metadata_schema_types.py +++ b/tests/unit/aiplatform/test_metadata_schema.py @@ -19,24 +19,24 @@ from importlib import reload from unittest import mock from unittest.mock import patch - import pytest + from google.cloud import aiplatform from google.cloud.aiplatform import initializer +from google.cloud.aiplatform.compat.types import artifact as gca_artifact +from google.cloud.aiplatform.compat.types import execution as gca_execution from google.cloud.aiplatform.metadata import metadata from google.cloud.aiplatform.metadata.schema import base_artifact from google.cloud.aiplatform.metadata.schema import base_execution -from google.cloud.aiplatform.metadata.schema import google_schema -from google.cloud.aiplatform.metadata.schema import system_schema +from google.cloud.aiplatform.metadata.schema import google_artifact_schema +from google.cloud.aiplatform.metadata.schema import system_artifact_schema +from google.cloud.aiplatform.metadata.schema import system_execution_schema from google.cloud.aiplatform.metadata.schema import utils - -from google.cloud.aiplatform.compat.types import artifact as gca_artifact -from google.cloud.aiplatform.compat.types import execution as gca_execution - from google.cloud.aiplatform_v1 import MetadataServiceClient from google.cloud.aiplatform_v1 import Artifact as GapicArtifact from google.cloud.aiplatform_v1 import 
Execution as GapicExecution + # project _TEST_PROJECT = "test-project" _TEST_LOCATION = "us-central1" @@ -283,15 +283,17 @@ def teardown_method(self): initializer.global_pool.shutdown(wait=True) def test_vertex_dataset_schema_title_is_set_correctly(self): - artifact = google_schema.VertexDataset() + artifact = google_artifact_schema.VertexDataset() assert artifact.schema_title == "google.VertexDataset" def test_vertex_dataset_resouce_name_is_set_in_metadata(self): - artifact = google_schema.VertexDataset(dataset_name=_TEST_ARTIFACT_NAME) + artifact = google_artifact_schema.VertexDataset( + dataset_name=_TEST_ARTIFACT_NAME + ) assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME def test_vertex_dataset_constructor_parameters_are_set_correctly(self): - artifact = google_schema.VertexDataset( + artifact = google_artifact_schema.VertexDataset( dataset_name=_TEST_ARTIFACT_NAME, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, @@ -306,15 +308,17 @@ def test_vertex_dataset_constructor_parameters_are_set_correctly(self): assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_vertex_model_schema_title_is_set_correctly(self): - artifact = google_schema.VertexModel() + artifact = google_artifact_schema.VertexModel() assert artifact.schema_title == "google.VertexModel" def test_vertex_model_resouce_name_is_set_in_metadata(self): - artifact = google_schema.VertexModel(vertex_model_name=_TEST_ARTIFACT_NAME) + artifact = google_artifact_schema.VertexModel( + vertex_model_name=_TEST_ARTIFACT_NAME + ) assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME def test_vertex_model_constructor_parameters_are_set_correctly(self): - artifact = google_schema.VertexModel( + artifact = google_artifact_schema.VertexModel( vertex_model_name=_TEST_ARTIFACT_NAME, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, @@ -329,17 +333,17 @@ def test_vertex_model_constructor_parameters_are_set_correctly(self): assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_vertex_endpoint_schema_title_is_set_correctly(self): - artifact = google_schema.VertexEndpoint() + artifact = google_artifact_schema.VertexEndpoint() assert artifact.schema_title == "google.VertexEndpoint" def test_vertex_endpoint_resouce_name_is_set_in_metadata(self): - artifact = google_schema.VertexEndpoint( + artifact = google_artifact_schema.VertexEndpoint( vertex_endpoint_name=_TEST_ARTIFACT_NAME ) assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME def test_vertex_endpoint_constructor_parameters_are_set_correctly(self): - artifact = google_schema.VertexEndpoint( + artifact = google_artifact_schema.VertexEndpoint( vertex_endpoint_name=_TEST_ARTIFACT_NAME, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, @@ -363,7 +367,7 @@ def test_unmanaged_container_model_title_is_set_correctly(self): container_spec = utils.ContainerSpec( image_uri="gcr.io/test_container_image_uri" ) - artifact = google_schema.UnmanagedContainerModel( + artifact = google_artifact_schema.UnmanagedContainerModel( predict_schema_ta=predict_schema_ta, container_spec=container_spec, ) @@ -379,7 +383,7 @@ def test_unmanaged_container_model_resouce_name_is_set_in_metadata(self): container_spec = utils.ContainerSpec( image_uri="gcr.io/test_container_image_uri" ) - artifact = google_schema.UnmanagedContainerModel( + artifact = google_artifact_schema.UnmanagedContainerModel( predict_schema_ta=predict_schema_ta, container_spec=container_spec, unmanaged_container_model_name=_TEST_ARTIFACT_NAME, @@ -397,7 +401,7 @@ def 
test_unmanaged_container_model_constructor_parameters_are_set_correctly(self image_uri="gcr.io/test_container_image_uri" ) - artifact = google_schema.UnmanagedContainerModel( + artifact = google_artifact_schema.UnmanagedContainerModel( predict_schema_ta=predict_schema_ta, container_spec=container_spec, unmanaged_container_model_name=_TEST_ARTIFACT_NAME, @@ -424,15 +428,15 @@ def teardown_method(self): initializer.global_pool.shutdown(wait=True) def test_system_dataset_schema_title_is_set_correctly(self): - artifact = system_schema.Dataset() + artifact = system_artifact_schema.Dataset() assert artifact.schema_title == "system.Dataset" def test_system_dataset_resouce_name_is_set_in_metadata(self): - artifact = system_schema.Dataset(dataset_name=_TEST_ARTIFACT_NAME) + artifact = system_artifact_schema.Dataset(dataset_name=_TEST_ARTIFACT_NAME) assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME def test_system_dataset_constructor_parameters_are_set_correctly(self): - artifact = system_schema.Dataset( + artifact = system_artifact_schema.Dataset( dataset_name=_TEST_ARTIFACT_NAME, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, @@ -447,15 +451,15 @@ def test_system_dataset_constructor_parameters_are_set_correctly(self): assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_system_model_schema_title_is_set_correctly(self): - artifact = system_schema.Model() + artifact = system_artifact_schema.Model() assert artifact.schema_title == "system.Model" def test_system_model_resouce_name_is_set_in_metadata(self): - artifact = system_schema.Model(model_name=_TEST_ARTIFACT_NAME) + artifact = system_artifact_schema.Model(model_name=_TEST_ARTIFACT_NAME) assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME def test_system_model_constructor_parameters_are_set_correctly(self): - artifact = system_schema.Model( + artifact = system_artifact_schema.Model( model_name=_TEST_ARTIFACT_NAME, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, @@ -470,15 +474,15 @@ def test_system_model_constructor_parameters_are_set_correctly(self): assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_system_metrics_schema_title_is_set_correctly(self): - artifact = system_schema.Metrics() + artifact = system_artifact_schema.Metrics() assert artifact.schema_title == "system.Metrics" def test_system_metrics_resouce_name_is_set_in_metadata(self): - artifact = system_schema.Metrics(metrics_name=_TEST_ARTIFACT_NAME) + artifact = system_artifact_schema.Metrics(metrics_name=_TEST_ARTIFACT_NAME) assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME def test_system_metrics_constructor_parameters_are_set_correctly(self): - artifact = system_schema.Metrics( + artifact = system_artifact_schema.Metrics( metrics_name=_TEST_ARTIFACT_NAME, accuracy=0.1, precision=0.2, @@ -505,27 +509,15 @@ def test_system_metrics_constructor_parameters_are_set_correctly(self): # Test system.Execution Schemas def test_system_container_execution_schema_title_is_set_correctly(self): - execution = system_schema.ContainerExecution() + execution = system_execution_schema.ContainerExecution() assert execution.schema_title == "system.ContainerExecution" - def test_system_importer_execution_schema_title_is_set_correctly(self): - execution = system_schema.ImporterExecution() - assert execution.schema_title == "system.ImporterExecution" - - def test_system_resolver_execution_schema_title_is_set_correctly(self): - execution = system_schema.ResolverExecution() - assert execution.schema_title == "system.ResolverExecution" - - def 
test_system_dag_execution_schema_title_is_set_correctly(self): - execution = system_schema.DagExecution() - assert execution.schema_title == "system.DagExecution" - def test_system_custom_job_execution_schema_title_is_set_correctly(self): - execution = system_schema.CustomJobExecution() + execution = system_execution_schema.CustomJobExecution() assert execution.schema_title == "system.CustomJobExecution" def test_system_run_execution_schema_title_is_set_correctly(self): - execution = system_schema.Run() + execution = system_execution_schema.Run() assert execution.schema_title == "system.Run" From 02d6069cdb177186425eca3310251449c2a118bc Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 18:43:38 +0000 Subject: [PATCH 18/48] regroup tests to match module names --- tests/unit/aiplatform/test_metadata_schema.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/tests/unit/aiplatform/test_metadata_schema.py b/tests/unit/aiplatform/test_metadata_schema.py index a07efbfb4f..f2ec69482b 100644 --- a/tests/unit/aiplatform/test_metadata_schema.py +++ b/tests/unit/aiplatform/test_metadata_schema.py @@ -273,7 +273,7 @@ def test_start_execution_method_calls_gapic_library_with_correct_parameters( assert kwargs["execution"].metadata == _TEST_UPDATED_METADATA -class TestMetadataGoogleSchema: +class TestMetadataGoogleArtifactSchema: def setup_method(self): reload(initializer) reload(metadata) @@ -418,7 +418,7 @@ def test_unmanaged_container_model_constructor_parameters_are_set_correctly(self assert artifact.schema_version == _TEST_SCHEMA_VERSION -class TestMetadataSystemSchema: +class TestMetadataSystemArtifactSchema: def setup_method(self): reload(initializer) reload(metadata) @@ -507,6 +507,16 @@ def test_system_metrics_constructor_parameters_are_set_correctly(self): assert artifact.metadata["mean_absolute_error"] == 0.5 assert artifact.metadata["mean_squared_error"] == 0.6 + +class TestMetadataSystemSchemaExecution: + def setup_method(self): + reload(initializer) + reload(metadata) + reload(aiplatform) + + def teardown_method(self): + initializer.global_pool.shutdown(wait=True) + # Test system.Execution Schemas def test_system_container_execution_schema_title_is_set_correctly(self): execution = system_execution_schema.ContainerExecution() From abe3d48efb4ec1d268161bad8ced13193bdbc850 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 19:09:45 +0000 Subject: [PATCH 19/48] fix e2e integration tests --- .../aiplatform/test_e2e_metadata_schema.py | 104 ++++++++---------- 1 file changed, 45 insertions(+), 59 deletions(-) diff --git a/tests/system/aiplatform/test_e2e_metadata_schema.py b/tests/system/aiplatform/test_e2e_metadata_schema.py index f71a45c226..9429bb1637 100644 --- a/tests/system/aiplatform/test_e2e_metadata_schema.py +++ b/tests/system/aiplatform/test_e2e_metadata_schema.py @@ -30,13 +30,19 @@ @pytest.mark.usefixtures("tear_down_resources") class TestMetadataSchema(e2e_base.TestEndToEnd): - def test_artifact_creation_using_schema_base_class(self): + _temp_prefix = "tmpvrtxsdk-e2e" + + def setup_class(cls): # Truncating the name because of resource id constraints from the service - artifact_display_name = self._make_display_name("base-artifact")[:30] - artifact_uri = self._make_display_name("base-uri") - artifact_metadata = {"test_property": "test_value"} - artifact_description = self._make_display_name("base-description") + cls.artifact_display_name = cls._make_display_name("base-artifact")[:30] + cls.artifact_uri = 
cls._make_display_name("base-uri") + cls.artifact_metadata = {"test_property": "test_value"} + cls.artifact_description = cls._make_display_name("base-description") + cls.execution_display_name = cls._make_display_name("base-execution")[:30] + cls.execution_description = cls._make_display_name("base-description") + + def test_artifact_creation_using_schema_base_class(self): aiplatform.init( project=e2e_base._PROJECT, @@ -44,52 +50,42 @@ def test_artifact_creation_using_schema_base_class(self): ) artifact = base_artifact.BaseArtifactSchema( - display_name=artifact_display_name, - uri=artifact_uri, - metadata=artifact_metadata, - description=artifact_description, + display_name=self.artifact_display_name, + uri=self.artifact_uri, + metadata=self.artifact_metadata, + description=self.artifact_description, ).create() - assert artifact.display_name == artifact_display_name - assert json.dumps(artifact.metadata) == json.dumps(artifact_metadata) + assert artifact.display_name == self.artifact_display_name + assert json.dumps(artifact.metadata) == json.dumps(self.artifact_metadata) assert artifact.schema_title == "system.Artifact" - assert artifact.description == artifact_description + assert artifact.description == self.artifact_description assert "/metadataStores/default/artifacts/" in artifact.resource_name def test_system_dataset_artifact_create(self): - # Truncating the name because of resource id constraints from the service - artifact_display_name = self._make_display_name("dataset-artifact")[:30] - artifact_uri = self._make_display_name("dataset-uri") - artifact_metadata = {"test_property": "test_value"} - artifact_description = self._make_display_name("dataset-description") - aiplatform.init( project=e2e_base._PROJECT, location=e2e_base._LOCATION, ) artifact = system_artifact_schema.Dataset( - display_name=artifact_display_name, - uri=artifact_uri, - metadata=artifact_metadata, - description=artifact_description, + display_name=self.artifact_display_name, + uri=self.artifact_uri, + metadata=self.artifact_metadata, + description=self.artifact_description, ).create() - assert artifact.display_name == artifact_display_name - assert json.dumps(artifact.metadata) == json.dumps(artifact_metadata) + assert artifact.display_name == self.artifact_display_name + assert json.dumps(artifact.metadata) == json.dumps(self.artifact_metadata) assert artifact.schema_title == "system.Dataset" - assert artifact.description == artifact_description + assert artifact.description == self.artifact_description assert "/metadataStores/default/artifacts/" in artifact.resource_name def test_google_dataset_artifact_create(self): # Truncating the name because of resource id constraints from the service - artifact_display_name = self._make_display_name("ds-artifact")[:30] - artifact_uri = self._make_display_name("vertex-dataset-uri") - artifact_metadata = {"test_property": "test_value"} - artifact_description = self._make_display_name("vertex-dataset-description") - dataset_name = f"projects/{e2e_base._PROJECT}/locations/{e2e_base._LOCATION}/datasets/{artifact_display_name}" + dataset_name = f"projects/{e2e_base._PROJECT}/locations/{e2e_base._LOCATION}/datasets/{self.artifact_display_name}" aiplatform.init( project=e2e_base._PROJECT, @@ -98,45 +94,38 @@ def test_google_dataset_artifact_create(self): artifact = google_artifact_schema.VertexDataset( dataset_name=dataset_name, - display_name=artifact_display_name, - uri=artifact_uri, - metadata=artifact_metadata, - description=artifact_description, + 
display_name=self.artifact_display_name, + uri=self.artifact_uri, + metadata=self.artifact_metadata, + description=self.artifact_description, ).create() - expected_metadata = artifact_metadata + expected_metadata = self.artifact_metadata expected_metadata["resourceName"] = dataset_name - assert artifact.display_name == artifact_display_name + assert artifact.display_name == self.artifact_display_name assert json.dumps(artifact.metadata) == json.dumps(expected_metadata) assert artifact.schema_title == "google.VertexDataset" - assert artifact.description == artifact_description + assert artifact.description == self.artifact_description assert "/metadataStores/default/artifacts/" in artifact.resource_name def test_execution_create_using_schema_base_class(self): - # Truncating the name because of resource id constraints from the service - execution_display_name = self._make_display_name("base-execution")[:30] - execution_description = self._make_display_name("base-description") - aiplatform.init( project=e2e_base._PROJECT, location=e2e_base._LOCATION, ) execution = base_execution.BaseExecutionSchema( - display_name=execution_display_name, - description=execution_description, + display_name=self.execution_display_name, + description=self.execution_description, ).create() - assert execution.display_name == execution_display_name + assert execution.display_name == self.execution_display_name assert execution.schema_title == "system.ContainerExecution" - assert execution.description == execution_description + assert execution.description == self.execution_description assert "/metadataStores/default/executions/" in execution.resource_name def test_execution_create_using_system_schema_class(self): - # Truncating the name because of resource id constraints from the service - execution_display_name = self._make_display_name("base-execution")[:30] - execution_description = self._make_display_name("base-description") aiplatform.init( project=e2e_base._PROJECT, @@ -144,19 +133,16 @@ def test_execution_create_using_system_schema_class(self): ) execution = system_execution_schema.CustomJobExecution( - display_name=execution_display_name, - description=execution_description, + display_name=self.execution_display_name, + description=self.execution_description, ).create() - assert execution.display_name == execution_display_name + assert execution.display_name == self.execution_display_name assert execution.schema_title == "system.CustomJobExecution" - assert execution.description == execution_description + assert execution.description == self.execution_description assert "/metadataStores/default/executions/" in execution.resource_name def test_execution_start_execution_using_system_schema_class(self): - # Truncating the name because of resource id constraints from the service - execution_display_name = self._make_display_name("base-execution")[:30] - execution_description = self._make_display_name("base-description") aiplatform.init( project=e2e_base._PROJECT, @@ -164,11 +150,11 @@ def test_execution_start_execution_using_system_schema_class(self): ) execution = system_execution_schema.ContainerExecution( - display_name=execution_display_name, - description=execution_description, + display_name=self.execution_display_name, + description=self.execution_description, ).start_execution() - assert execution.display_name == execution_display_name + assert execution.display_name == self.execution_display_name assert execution.schema_title == "system.ContainerExecution" - assert execution.description == 
execution_description + assert execution.description == self.execution_description assert "/metadataStores/default/executions/" in execution.resource_name From 8056a41886ca16a1b29eee3ac21ce344e03daf73 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 20:28:07 +0000 Subject: [PATCH 20/48] remove call to _temp_prefix = tmpvrtxsdk-e2e from E2E test --- tests/system/aiplatform/test_e2e_metadata_schema.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/system/aiplatform/test_e2e_metadata_schema.py b/tests/system/aiplatform/test_e2e_metadata_schema.py index 9429bb1637..b42b4cdf4b 100644 --- a/tests/system/aiplatform/test_e2e_metadata_schema.py +++ b/tests/system/aiplatform/test_e2e_metadata_schema.py @@ -30,9 +30,6 @@ @pytest.mark.usefixtures("tear_down_resources") class TestMetadataSchema(e2e_base.TestEndToEnd): - - _temp_prefix = "tmpvrtxsdk-e2e" - def setup_class(cls): # Truncating the name because of resource id constraints from the service cls.artifact_display_name = cls._make_display_name("base-artifact")[:30] From d2daa78d7bc45e05047544498842d693f5876c0c Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 15:26:24 -0700 Subject: [PATCH 21/48] Update google/cloud/aiplatform/metadata/schema/base_execution.py Co-authored-by: sasha-gitg <44654632+sasha-gitg@users.noreply.github.com> --- google/cloud/aiplatform/metadata/schema/base_execution.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/aiplatform/metadata/schema/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py index c237c0d56a..c265713322 100644 --- a/google/cloud/aiplatform/metadata/schema/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -152,7 +152,7 @@ def start_execution( Execution: Instantiated representation of the managed Metadata Execution. """ - self.exectuion = metadata._ExperimentTracker().start_execution( + self.execution = metadata._ExperimentTracker().start_execution( base_execution_schema=self, resume=False, metadata_store_id=metadata_store_id, From 839b708f3c22f9af6ae0302d8767c3cb90b8b3fe Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 22:52:53 +0000 Subject: [PATCH 22/48] remove artifact and schema referencing the create result to self --- google/cloud/aiplatform/metadata/artifact.py | 6 +++--- google/cloud/aiplatform/metadata/schema/base_artifact.py | 3 +-- google/cloud/aiplatform/metadata/schema/base_execution.py | 7 +++---- 3 files changed, 7 insertions(+), 9 deletions(-) diff --git a/google/cloud/aiplatform/metadata/artifact.py b/google/cloud/aiplatform/metadata/artifact.py index fb1469d667..9519a816eb 100644 --- a/google/cloud/aiplatform/metadata/artifact.py +++ b/google/cloud/aiplatform/metadata/artifact.py @@ -15,13 +15,14 @@ # limitations under the License. 
# -import proto + from typing import Optional, Dict, Union +import proto + from google.auth import credentials as auth_credentials from google.cloud.aiplatform import base - from google.cloud.aiplatform import models from google.cloud.aiplatform import utils from google.cloud.aiplatform.compat.types import artifact as gca_artifact @@ -31,7 +32,6 @@ from google.cloud.aiplatform.metadata import metadata_store from google.cloud.aiplatform.metadata import resource from google.cloud.aiplatform.metadata import utils as metadata_utils - from google.cloud.aiplatform.metadata.schema import base_artifact from google.cloud.aiplatform.utils import rest_utils diff --git a/google/cloud/aiplatform/metadata/schema/base_artifact.py b/google/cloud/aiplatform/metadata/schema/base_artifact.py index 26545bba0e..b2a36cfb55 100644 --- a/google/cloud/aiplatform/metadata/schema/base_artifact.py +++ b/google/cloud/aiplatform/metadata/schema/base_artifact.py @@ -126,11 +126,10 @@ def create( Returns: Artifact: Instantiated representation of the managed Metadata Artifact. """ - self.artifact = artifact.Artifact.create_from_base_schema( + return artifact.Artifact.create_from_base_schema( base_artifact_schema=self, metadata_store_id=metadata_store_id, project=project, location=location, credentials=credentials, ) - return self.artifact diff --git a/google/cloud/aiplatform/metadata/schema/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py index c265713322..b75fa7624a 100644 --- a/google/cloud/aiplatform/metadata/schema/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -115,14 +115,14 @@ def create( Execution: Instantiated representation of the managed Metadata Execution. """ - self.exectuion = execution.Execution.create( + self.execution = execution.Execution.create( base_execution_schema=self, metadata_store_id=metadata_store_id, project=project, location=location, credentials=credentials, ) - return self.exectuion + return self.execution def start_execution( self, @@ -152,7 +152,7 @@ def start_execution( Execution: Instantiated representation of the managed Metadata Execution. """ - self.execution = metadata._ExperimentTracker().start_execution( + return metadata._ExperimentTracker().start_execution( base_execution_schema=self, resume=False, metadata_store_id=metadata_store_id, @@ -160,4 +160,3 @@ def start_execution( location=location, credentials=credentials, ) - return self.exectuion From 81beab2b456c8698b67f2cca1a3616f2d13a1f31 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 23:06:11 +0000 Subject: [PATCH 23/48] remove kwargs --- .../aiplatform/metadata/schema/base_artifact.py | 4 ---- .../aiplatform/metadata/schema/base_execution.py | 4 ---- .../metadata/schema/google_artifact_schema.py | 16 ---------------- .../metadata/schema/system_artifact_schema.py | 12 ------------ .../metadata/schema/system_execution_schema.py | 12 ------------ 5 files changed, 48 deletions(-) diff --git a/google/cloud/aiplatform/metadata/schema/base_artifact.py b/google/cloud/aiplatform/metadata/schema/base_artifact.py index b2a36cfb55..17ee27c030 100644 --- a/google/cloud/aiplatform/metadata/schema/base_artifact.py +++ b/google/cloud/aiplatform/metadata/schema/base_artifact.py @@ -59,8 +59,6 @@ class BaseArtifactSchema(object): managed by clients (such as Vertex AI Pipelines), and the system does not prescribe or check the validity of state transitions. - **kwargs: - Optional. 
Additional Args that will be passed directly to the Artifact base method for backward compatibility. """ ARTIFACT_PROPERTY_KEY_RESOURCE_NAME = "resourceName" @@ -76,7 +74,6 @@ def __init__( description: Optional[str] = None, metadata: Optional[Dict] = None, state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, - **kwargs, ): """Initializes the Artifact with the given name, URI and metadata.""" @@ -97,7 +94,6 @@ def __init__( self.description = description self.metadata = metadata self.state = state - self.kwargs = kwargs def create( self, diff --git a/google/cloud/aiplatform/metadata/schema/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py index b75fa7624a..bd4a35b6dc 100644 --- a/google/cloud/aiplatform/metadata/schema/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -48,8 +48,6 @@ class BaseExecutionSchema(object): Optional. Contains the metadata information that will be stored in the Execution. description (str): Optional. Describes the purpose of the Execution to be created. - **kwargs: - Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. """ ARTIFACT_PROPERTY_KEY_RESOURCE_NAME = "resourceName" @@ -64,7 +62,6 @@ def __init__( schema_version: Optional[str] = None, metadata: Optional[Dict] = None, description: Optional[str] = None, - **kwargs, ): """Initializes the Execution with the given name, URI and metadata.""" @@ -85,7 +82,6 @@ def __init__( self.schema_version = schema_version or constants._DEFAULT_SCHEMA_VERSION self.metadata = metadata self.description = description - self.kwargs = kwargs def create( self, diff --git a/google/cloud/aiplatform/metadata/schema/google_artifact_schema.py b/google/cloud/aiplatform/metadata/schema/google_artifact_schema.py index 7dd2fa088c..71ab3be62e 100644 --- a/google/cloud/aiplatform/metadata/schema/google_artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/google_artifact_schema.py @@ -35,7 +35,6 @@ def __init__( description: Optional[str] = None, metadata: Optional[Dict] = None, state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, - **kwargs, ): """Args: dataset_name (str): @@ -62,8 +61,6 @@ def __init__( managed by clients (such as Vertex AI Pipelines), and the system does not prescribe or check the validity of state transitions. - **kwargs: - Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. """ extended_metadata = metadata or {} extended_metadata["resourceName"] = dataset_name @@ -76,7 +73,6 @@ def __init__( description=description, metadata=extended_metadata, state=state, - kwargs=kwargs, ) @@ -94,7 +90,6 @@ def __init__( description: Optional[str] = None, metadata: Optional[Dict] = None, state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, - **kwargs, ): """Args: vertex_model_name (str): @@ -121,8 +116,6 @@ def __init__( managed by clients (such as Vertex AI Pipelines), and the system does not prescribe or check the validity of state transitions. - **kwargs: - Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. 
""" extended_metadata = metadata or {} @@ -137,7 +130,6 @@ def __init__( description=description, metadata=extended_metadata, state=state, - kwargs=kwargs, ) @@ -155,7 +147,6 @@ def __init__( description: Optional[str] = None, metadata: Optional[Dict] = None, state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, - **kwargs, ): """Args: vertex_endpoint_name (str): @@ -182,8 +173,6 @@ def __init__( managed by clients (such as Vertex AI Pipelines), and the system does not prescribe or check the validity of state transitions. - **kwargs: - Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. """ extended_metadata = metadata or {} extended_metadata["resourceName"] = vertex_endpoint_name @@ -197,7 +186,6 @@ def __init__( description=description, metadata=extended_metadata, state=state, - kwargs=kwargs, ) @@ -217,7 +205,6 @@ def __init__( description: Optional[str] = None, metadata: Optional[Dict] = None, state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, - **kwargs, ): """Args: predict_schema_ta (PredictSchemata): @@ -247,8 +234,6 @@ def __init__( managed by clients (such as Vertex AI Pipelines), and the system does not prescribe or check the validity of state transitions. - **kwargs: - Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. """ extended_metadata = metadata or {} extended_metadata["resourceName"] = unmanaged_container_model_name @@ -264,5 +249,4 @@ def __init__( description=description, metadata=extended_metadata, state=state, - kwargs=kwargs, ) diff --git a/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py b/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py index e24c7e7042..182cc04dbc 100644 --- a/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py @@ -35,7 +35,6 @@ def __init__( description: Optional[str] = None, metadata: Optional[Dict] = None, state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, - **kwargs, ): """Args: model_name (str): @@ -61,8 +60,6 @@ def __init__( managed by clients (such as Vertex AI Pipelines), and the system does not prescribe or check the validity of state transitions. - **kwargs: - Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. """ extended_metadata = metadata or {} extended_metadata["resourceName"] = model_name @@ -75,7 +72,6 @@ def __init__( description=description, metadata=extended_metadata, state=state, - kwargs=kwargs, ) @@ -93,7 +89,6 @@ def __init__( description: Optional[str] = None, metadata: Optional[Dict] = None, state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, - **kwargs, ): """Args: dataset_name (str): @@ -119,8 +114,6 @@ def __init__( managed by clients (such as Vertex AI Pipelines), and the system does not prescribe or check the validity of state transitions. - **kwargs: - Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. 
""" extended_metadata = metadata or {} extended_metadata["resourceName"] = dataset_name @@ -133,7 +126,6 @@ def __init__( description=description, metadata=extended_metadata, state=state, - kwargs=kwargs, ) @@ -157,7 +149,6 @@ def __init__( description: Optional[str] = None, metadata: Optional[Dict] = None, state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, - **kwargs, ): """Args: metrics_name (str): @@ -195,8 +186,6 @@ def __init__( managed by clients (such as Vertex AI Pipelines), and the system does not prescribe or check the validity of state transitions. - **kwargs: - Optional. Additional Args that will be passed directly to the Artifact base method for backward compatibility. """ extended_metadata = metadata or {} extended_metadata["accuracy"] = accuracy @@ -216,5 +205,4 @@ def __init__( description=description, metadata=extended_metadata, state=state, - kwargs=kwargs, ) diff --git a/google/cloud/aiplatform/metadata/schema/system_execution_schema.py b/google/cloud/aiplatform/metadata/schema/system_execution_schema.py index c66dd99fc6..ded5970275 100644 --- a/google/cloud/aiplatform/metadata/schema/system_execution_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system_execution_schema.py @@ -34,7 +34,6 @@ def __init__( schema_version: Optional[str] = None, metadata: Optional[Dict] = None, description: Optional[str] = None, - **kwargs, ): """Args: state (gca_execution.Execution.State.RUNNING): @@ -52,8 +51,6 @@ def __init__( Optional. Contains the metadata information that will be stored in the Execution. description (str): Optional. Describes the purpose of the Execution to be created. - **kwargs: - Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. """ extended_metadata = metadata or {} extended_metadata["resourceName"] = execution_name @@ -65,7 +62,6 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, - kwargs=kwargs, ) @@ -82,7 +78,6 @@ def __init__( schema_version: Optional[str] = None, metadata: Optional[Dict] = None, description: Optional[str] = None, - **kwargs, ): """Args: state (gca_execution.Execution.State.RUNNING): @@ -100,8 +95,6 @@ def __init__( Optional. Contains the metadata information that will be stored in the Execution. description (str): Optional. Describes the purpose of the Execution to be created. - **kwargs: - Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. """ extended_metadata = metadata or {} extended_metadata["resourceName"] = execution_name @@ -113,7 +106,6 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, - kwargs=kwargs, ) @@ -130,7 +122,6 @@ def __init__( schema_version: Optional[str] = None, metadata: Optional[Dict] = None, description: Optional[str] = None, - **kwargs, ): """Args: state (gca_execution.Execution.State.RUNNING): @@ -148,8 +139,6 @@ def __init__( Optional. Contains the metadata information that will be stored in the Execution. description (str): Optional. Describes the purpose of the Execution to be created. - **kwargs: - Optional. Additional Args that will be passed directly to the Execution base method for backward compatibility. 
""" extended_metadata = metadata or {} extended_metadata["resourceName"] = execution_name @@ -161,5 +150,4 @@ def __init__( schema_version=schema_version, description=description, metadata=extended_metadata, - kwargs=kwargs, ) From a308bfd628ab6f5015539c1fcbf9eb73ce07fc5d Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 23:08:35 +0000 Subject: [PATCH 24/48] fix typing for container spec --- .../cloud/aiplatform/metadata/schema/google_artifact_schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/aiplatform/metadata/schema/google_artifact_schema.py b/google/cloud/aiplatform/metadata/schema/google_artifact_schema.py index 71ab3be62e..0cb179033e 100644 --- a/google/cloud/aiplatform/metadata/schema/google_artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/google_artifact_schema.py @@ -197,7 +197,7 @@ class UnmanagedContainerModel(base_artifact.BaseArtifactSchema): def __init__( self, predict_schema_ta: utils.PredictSchemata, - container_spec: utils.PredictSchemata, + container_spec: utils.ContainerSpec, unmanaged_container_model_name: Optional[str] = None, uri: Optional[str] = None, display_name: Optional[str] = None, From 3122e7da1686773389cf5fffb17a97037f26dace Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 23:19:29 +0000 Subject: [PATCH 25/48] remove resouceName from system types --- .../metadata/schema/system_artifact_schema.py | 23 +------------------ tests/unit/aiplatform/test_metadata_schema.py | 15 ------------ 2 files changed, 1 insertion(+), 37 deletions(-) diff --git a/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py b/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py index 182cc04dbc..20497bacd6 100644 --- a/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py @@ -22,13 +22,12 @@ class Model(base_artifact.BaseArtifactSchema): - """Schemaless Artifact to store Markdown file.""" + """Artifact type for model.""" SCHEMA_TITLE = "system.Model" def __init__( self, - model_name: Optional[str] = None, uri: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -37,10 +36,6 @@ def __init__( state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, ): """Args: - model_name (str): - Optional. The resource name of the Artifact following the format as follows. - This is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//artifacts/. uri (str): Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual artifact file. @@ -62,10 +57,8 @@ def __init__( check the validity of state transitions. """ extended_metadata = metadata or {} - extended_metadata["resourceName"] = model_name super(Model, self).__init__( schema_title=self.SCHEMA_TITLE, - resource_name=model_name, uri=uri, display_name=display_name, schema_version=schema_version, @@ -82,7 +75,6 @@ class Dataset(base_artifact.BaseArtifactSchema): def __init__( self, - dataset_name: Optional[str] = None, uri: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -91,10 +83,6 @@ def __init__( state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, ): """Args: - dataset_name (str): - Optional. The resource name of the Artifact following the format as follows. 
- This is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//artifacts/. uri (str): Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual artifact file. @@ -116,10 +104,8 @@ def __init__( check the validity of state transitions. """ extended_metadata = metadata or {} - extended_metadata["resourceName"] = dataset_name super(Dataset, self).__init__( schema_title=self.SCHEMA_TITLE, - resource_name=dataset_name, uri=uri, display_name=display_name, schema_version=schema_version, @@ -136,7 +122,6 @@ class Metrics(base_artifact.BaseArtifactSchema): def __init__( self, - metrics_name: Optional[str] = None, accuracy: Optional[float] = 0, precision: Optional[float] = 0, recall: Optional[float] = 0, @@ -151,10 +136,6 @@ def __init__( state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, ): """Args: - metrics_name (str): - Optional. The resource name of the Artifact following the format as follows. - This is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//artifacts/. accuracy (float): Optional. Defaults to zero. precision (float): @@ -194,11 +175,9 @@ def __init__( extended_metadata["f1score"] = f1score extended_metadata["mean_absolute_error"] = mean_absolute_error extended_metadata["mean_squared_error"] = mean_squared_error - extended_metadata["resourceName"] = metrics_name super(Metrics, self).__init__( schema_title=self.SCHEMA_TITLE, - resource_name=metrics_name, uri=uri, display_name=display_name, schema_version=schema_version, diff --git a/tests/unit/aiplatform/test_metadata_schema.py b/tests/unit/aiplatform/test_metadata_schema.py index f2ec69482b..095df4eed2 100644 --- a/tests/unit/aiplatform/test_metadata_schema.py +++ b/tests/unit/aiplatform/test_metadata_schema.py @@ -431,13 +431,8 @@ def test_system_dataset_schema_title_is_set_correctly(self): artifact = system_artifact_schema.Dataset() assert artifact.schema_title == "system.Dataset" - def test_system_dataset_resouce_name_is_set_in_metadata(self): - artifact = system_artifact_schema.Dataset(dataset_name=_TEST_ARTIFACT_NAME) - assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME - def test_system_dataset_constructor_parameters_are_set_correctly(self): artifact = system_artifact_schema.Dataset( - dataset_name=_TEST_ARTIFACT_NAME, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, @@ -454,13 +449,8 @@ def test_system_model_schema_title_is_set_correctly(self): artifact = system_artifact_schema.Model() assert artifact.schema_title == "system.Model" - def test_system_model_resouce_name_is_set_in_metadata(self): - artifact = system_artifact_schema.Model(model_name=_TEST_ARTIFACT_NAME) - assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME - def test_system_model_constructor_parameters_are_set_correctly(self): artifact = system_artifact_schema.Model( - model_name=_TEST_ARTIFACT_NAME, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, @@ -477,13 +467,8 @@ def test_system_metrics_schema_title_is_set_correctly(self): artifact = system_artifact_schema.Metrics() assert artifact.schema_title == "system.Metrics" - def test_system_metrics_resouce_name_is_set_in_metadata(self): - artifact = system_artifact_schema.Metrics(metrics_name=_TEST_ARTIFACT_NAME) - assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME - def test_system_metrics_constructor_parameters_are_set_correctly(self): artifact = 
system_artifact_schema.Metrics( - metrics_name=_TEST_ARTIFACT_NAME, accuracy=0.1, precision=0.2, recall=0.3, From 30c0fcd6f4712c693592909e10cd008f190e3796 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 16 Jun 2022 23:24:53 +0000 Subject: [PATCH 26/48] metrics should default to None --- .../metadata/schema/system_artifact_schema.py | 42 +++++++++++-------- tests/unit/aiplatform/test_metadata_schema.py | 4 ++ 2 files changed, 28 insertions(+), 18 deletions(-) diff --git a/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py b/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py index 20497bacd6..401634b6b5 100644 --- a/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py @@ -122,12 +122,12 @@ class Metrics(base_artifact.BaseArtifactSchema): def __init__( self, - accuracy: Optional[float] = 0, - precision: Optional[float] = 0, - recall: Optional[float] = 0, - f1score: Optional[float] = 0, - mean_absolute_error: Optional[float] = 0, - mean_squared_error: Optional[float] = 0, + accuracy: Optional[float] = None, + precision: Optional[float] = None, + recall: Optional[float] = None, + f1score: Optional[float] = None, + mean_absolute_error: Optional[float] = None, + mean_squared_error: Optional[float] = None, uri: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -137,17 +137,17 @@ def __init__( ): """Args: accuracy (float): - Optional. Defaults to zero. + Optional. precision (float): - Optional. Defaults to zero. + Optional. recall (float): - Optional. Defaults to zero. + Optional. f1score (float): - Optional. Defaults to zero. + Optional. mean_absolute_error (float): - Optional. Defaults to zero. + Optional. mean_squared_error (float): - Optional. Defaults to zero. + Optional. uri (str): Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual artifact file. @@ -169,12 +169,18 @@ def __init__( check the validity of state transitions. 
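Under this change only the metric values that are actually supplied are written into the artifact metadata. A small sketch with illustrative values that stays local and does not call the service:

from google.cloud.aiplatform.metadata.schema import system_artifact_schema

# Unset metrics are simply omitted from metadata. Note the guards test
# truthiness, so an explicit 0.0 would also be left out.
metrics = system_artifact_schema.Metrics(
    accuracy=0.92,
    f1score=0.88,
    display_name="eval-metrics",
)
assert metrics.metadata == {"accuracy": 0.92, "f1score": 0.88}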
""" extended_metadata = metadata or {} - extended_metadata["accuracy"] = accuracy - extended_metadata["precision"] = precision - extended_metadata["recall"] = recall - extended_metadata["f1score"] = f1score - extended_metadata["mean_absolute_error"] = mean_absolute_error - extended_metadata["mean_squared_error"] = mean_squared_error + if accuracy: + extended_metadata["accuracy"] = accuracy + if precision: + extended_metadata["precision"] = precision + if recall: + extended_metadata["recall"] = recall + if f1score: + extended_metadata["f1score"] = f1score + if mean_absolute_error: + extended_metadata["mean_absolute_error"] = mean_absolute_error + if mean_squared_error: + extended_metadata["mean_squared_error"] = mean_squared_error super(Metrics, self).__init__( schema_title=self.SCHEMA_TITLE, diff --git a/tests/unit/aiplatform/test_metadata_schema.py b/tests/unit/aiplatform/test_metadata_schema.py index 095df4eed2..2a09172351 100644 --- a/tests/unit/aiplatform/test_metadata_schema.py +++ b/tests/unit/aiplatform/test_metadata_schema.py @@ -467,6 +467,10 @@ def test_system_metrics_schema_title_is_set_correctly(self): artifact = system_artifact_schema.Metrics() assert artifact.schema_title == "system.Metrics" + def test_system_metrics_values_default_to_none(self): + artifact = system_artifact_schema.Metrics() + assert artifact.metadata == {} + def test_system_metrics_constructor_parameters_are_set_correctly(self): artifact = system_artifact_schema.Metrics( accuracy=0.1, From 476946f469dacf23727bb6beb588bde9f252ef32 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Fri, 17 Jun 2022 14:19:57 +0000 Subject: [PATCH 27/48] change from using resouce_name to resource_id --- .../metadata/schema/base_artifact.py | 17 ++--- .../metadata/schema/base_execution.py | 20 ++---- .../metadata/schema/google_artifact_schema.py | 42 ++++++------ .../schema/system_execution_schema.py | 33 +++++----- tests/unit/aiplatform/test_metadata_schema.py | 66 ++++--------------- 5 files changed, 57 insertions(+), 121 deletions(-) diff --git a/google/cloud/aiplatform/metadata/schema/base_artifact.py b/google/cloud/aiplatform/metadata/schema/base_artifact.py index 17ee27c030..be76f61ec8 100644 --- a/google/cloud/aiplatform/metadata/schema/base_artifact.py +++ b/google/cloud/aiplatform/metadata/schema/base_artifact.py @@ -36,9 +36,9 @@ class BaseArtifactSchema(object): Args: schema_title (str): Optional. The schema title used by the Artifact, defaults to "system.Artifact" - resource_name (str): - Optional. The resource name of the Artifact following the format as follows. - This is globally unique in a metadataStore: + resource_id (str): + Optional. The portion of the Artifact name with + the format. This is globally unique in a metadataStore: projects/123/locations/us-central1/metadataStores//artifacts/. uri (str): Optional. The uniform resource identifier of the artifact file. 
May be empty if there is no actual @@ -67,7 +67,7 @@ class BaseArtifactSchema(object): def __init__( self, schema_title: Optional[str] = None, - resource_name: Optional[str] = None, + resource_id: Optional[str] = None, uri: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -80,14 +80,7 @@ def __init__( self.schema_title = BaseArtifactSchema.SCHEMA_TITLE if schema_title: self.schema_title = schema_title - self.resource_name = resource_name - - self.resource_id = None - if resource_name: - # Temporary work around while Artifact.create takes resource_id instead of resource_name - # TODO: switch to using resouce_name only when create resouce supports it. - self.resource_id = resource_name.split("/")[-1] - + self.resource_id = resource_id self.uri = uri self.display_name = display_name self.schema_version = schema_version or constants._DEFAULT_SCHEMA_VERSION diff --git a/google/cloud/aiplatform/metadata/schema/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py index bd4a35b6dc..5e49b1b63f 100644 --- a/google/cloud/aiplatform/metadata/schema/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -35,10 +35,10 @@ class BaseExecutionSchema(object): Required. schema_title identifies the schema title used by the Execution. state (gca_execution.Execution.State.RUNNING): Optional. State of this Execution. Defaults to RUNNING. - resource_name (str): - Optional. The resource name of the Execution following the format as follows. - This is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//artifacts/. + resource_id (str): + Optional. The portion of the Execution name with + the following format, this is globally unique in a metadataStore. + projects/123/locations/us-central1/metadataStores//executions/. display_name (str): Optional. The user-defined name of the Execution. schema_version (str): @@ -57,7 +57,7 @@ def __init__( self, schema_title: Optional[str] = None, state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, - resource_name: Optional[str] = None, + resource_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, metadata: Optional[Dict] = None, @@ -68,16 +68,8 @@ def __init__( self.schema_title = BaseExecutionSchema.SCHEMA_TITLE if schema_title: self.schema_title = schema_title - self.resource_name = resource_name self.state = state - - self.resource_id = None - - if resource_name: - # Temporary work around while Execution.create takes resource_id instead of resource_name - # TODO: switch to using resouce_name only when create execution supports it. 
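After this rename the base schema classes take the short resource ID directly instead of deriving it by splitting a full resource name. A brief sketch; the ID strings are illustrative:

from google.cloud.aiplatform.metadata.schema import base_artifact
from google.cloud.aiplatform.metadata.schema import base_execution

# The ID is stored as given; there is no longer any splitting on "/".
artifact_schema = base_artifact.BaseArtifactSchema(resource_id="my-artifact-id")
execution_schema = base_execution.BaseExecutionSchema(resource_id="my-execution-id")
assert artifact_schema.resource_id == "my-artifact-id"
assert execution_schema.resource_id == "my-execution-id"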
- self.resource_id = resource_name.split("/")[-1] - + self.resource_id = resource_id self.display_name = display_name self.schema_version = schema_version or constants._DEFAULT_SCHEMA_VERSION self.metadata = metadata diff --git a/google/cloud/aiplatform/metadata/schema/google_artifact_schema.py b/google/cloud/aiplatform/metadata/schema/google_artifact_schema.py index 0cb179033e..b30cf5751b 100644 --- a/google/cloud/aiplatform/metadata/schema/google_artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/google_artifact_schema.py @@ -28,7 +28,7 @@ class VertexDataset(base_artifact.BaseArtifactSchema): def __init__( self, - dataset_name: Optional[str] = None, + dataset_id: Optional[str] = None, uri: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -37,9 +37,9 @@ def __init__( state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, ): """Args: - dataset_name (str): - Optional. The name of the Dataset resource, in a form of - projects/{project}/locations/{location}/datasets/{datasets_name}. For + dataset_id (str): + Optional. The portion of the Artifact name, in a form of + projects/{project}/locations/{location}/datasets/{datasets_id}. For more details, see https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.datasets/get uri (str): @@ -63,10 +63,9 @@ def __init__( check the validity of state transitions. """ extended_metadata = metadata or {} - extended_metadata["resourceName"] = dataset_name super(VertexDataset, self).__init__( schema_title=self.SCHEMA_TITLE, - resource_name=dataset_name, + resource_id=dataset_id, uri=uri, display_name=display_name, schema_version=schema_version, @@ -83,7 +82,7 @@ class VertexModel(base_artifact.BaseArtifactSchema): def __init__( self, - vertex_model_name: Optional[str] = None, + vertex_model_id: Optional[str] = None, uri: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -92,9 +91,9 @@ def __init__( state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, ): """Args: - vertex_model_name (str): - Optional. The name of the VertexModel resource, in a form of - projects/{project}/locations/{location}/models/{model}. For + vertex_model_id (str): + Optional. The portion of the Artifact name, in a form of + projects/{project}/locations/{location}/models/{model_id}. For more details, see https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.models/get uri (str): @@ -119,11 +118,10 @@ def __init__( """ extended_metadata = metadata or {} - extended_metadata["resourceName"] = vertex_model_name super(VertexModel, self).__init__( schema_title=self.SCHEMA_TITLE, - resource_name=vertex_model_name, + resource_id=vertex_model_id, uri=uri, display_name=display_name, schema_version=schema_version, @@ -140,7 +138,7 @@ class VertexEndpoint(base_artifact.BaseArtifactSchema): def __init__( self, - vertex_endpoint_name: Optional[str] = None, + vertex_endpoint_id: Optional[str] = None, uri: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -150,8 +148,8 @@ def __init__( ): """Args: vertex_endpoint_name (str): - Optional. The name of the VertexEndpoint resource, in a form of - projects/{project}/locations/{location}/endpoints/{endpoint}. For + Optional. The portion of the Artifact name, in a form of + projects/{project}/locations/{location}/endpoints/{endpoint_id}. 
For more details, see https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.endpoints/get uri (str): @@ -175,11 +173,10 @@ def __init__( check the validity of state transitions. """ extended_metadata = metadata or {} - extended_metadata["resourceName"] = vertex_endpoint_name super(VertexEndpoint, self).__init__( schema_title=self.SCHEMA_TITLE, - resource_name=vertex_endpoint_name, + resource_id=vertex_endpoint_id, uri=uri, display_name=display_name, schema_version=schema_version, @@ -198,7 +195,7 @@ def __init__( self, predict_schema_ta: utils.PredictSchemata, container_spec: utils.ContainerSpec, - unmanaged_container_model_name: Optional[str] = None, + unmanaged_container_model_id: Optional[str] = None, uri: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -211,9 +208,9 @@ def __init__( An instance of PredictSchemata which holds instance, parameter and prediction schema uris. container_spec (ContainerSpec): An instance of ContainerSpec which holds the container configuration for the model. - unmanaged_container_model_name (str): - Optional. The resource name of the Artifact following the format as follows. - This is globally unique in a metadataStore: + unmanaged_container_model_id (str): + Optional. The portion of the Artifact name with + the format. This is globally unique in a metadataStore: projects/123/locations/us-central1/metadataStores//artifacts/. uri (str): Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual @@ -236,13 +233,12 @@ def __init__( check the validity of state transitions. """ extended_metadata = metadata or {} - extended_metadata["resourceName"] = unmanaged_container_model_name extended_metadata["predictSchemata"] = predict_schema_ta.to_dict() extended_metadata["containerSpec"] = container_spec.to_dict() super(UnmanagedContainerModel, self).__init__( schema_title=self.SCHEMA_TITLE, - resource_name=unmanaged_container_model_name, + resource_id=unmanaged_container_model_id, uri=uri, display_name=display_name, schema_version=schema_version, diff --git a/google/cloud/aiplatform/metadata/schema/system_execution_schema.py b/google/cloud/aiplatform/metadata/schema/system_execution_schema.py index ded5970275..3efce269f5 100644 --- a/google/cloud/aiplatform/metadata/schema/system_execution_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system_execution_schema.py @@ -29,7 +29,7 @@ class ContainerExecution(base_execution.BaseExecutionSchema): def __init__( self, state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, - execution_name: Optional[str] = None, + execution_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, metadata: Optional[Dict] = None, @@ -38,9 +38,9 @@ def __init__( """Args: state (gca_execution.Execution.State.RUNNING): Optional. State of this Execution. Defaults to RUNNING. - execution_name (str): - Optional. The resource name of the Execution following the format as follows. - This is globally unique in a metadataStore: + execution_id (str): + Optional. The portion of the Execution name with + the following format, this is globally unique in a metadataStore. projects/123/locations/us-central1/metadataStores//executions/. display_name (str): Optional. The user-defined name of the Execution. @@ -53,10 +53,9 @@ def __init__( Optional. Describes the purpose of the Execution to be created. 
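The system execution schema classes follow the same convention and now take the execution ID rather than a full resource name. A hedged sketch of the ContainerExecution path; the project, location, ID, and names are illustrative:

from google.cloud import aiplatform
from google.cloud.aiplatform.metadata.schema import system_execution_schema

aiplatform.init(project="my-project", location="us-central1")

# execution_id is only the final ID portion of the Execution resource name.
execution = system_execution_schema.ContainerExecution(
    execution_id="my-container-execution",
    display_name="preprocessing-step",
    description="Container execution created with an explicit ID",
).create()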
""" extended_metadata = metadata or {} - extended_metadata["resourceName"] = execution_name super(ContainerExecution, self).__init__( schema_title=self.SCHEMA_TITLE, - resource_name=execution_name, + resource_id=execution_id, state=state, display_name=display_name, schema_version=schema_version, @@ -73,7 +72,7 @@ class CustomJobExecution(base_execution.BaseExecutionSchema): def __init__( self, state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, - execution_name: Optional[str] = None, + execution_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, metadata: Optional[Dict] = None, @@ -82,9 +81,9 @@ def __init__( """Args: state (gca_execution.Execution.State.RUNNING): Optional. State of this Execution. Defaults to RUNNING. - execution_name (str): - Optional. The resource name of the Execution following the format as follows. - This is globally unique in a metadataStore: + execution_id (str): + Optional. The portion of the Execution name with + the following format, this is globally unique in a metadataStore. projects/123/locations/us-central1/metadataStores//executions/. display_name (str): Optional. The user-defined name of the Execution. @@ -97,10 +96,9 @@ def __init__( Optional. Describes the purpose of the Execution to be created. """ extended_metadata = metadata or {} - extended_metadata["resourceName"] = execution_name super(CustomJobExecution, self).__init__( schema_title=self.SCHEMA_TITLE, - resource_name=execution_name, + resource_id=execution_id, state=state, display_name=display_name, schema_version=schema_version, @@ -117,7 +115,7 @@ class Run(base_execution.BaseExecutionSchema): def __init__( self, state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, - execution_name: Optional[str] = None, + execution_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, metadata: Optional[Dict] = None, @@ -126,9 +124,9 @@ def __init__( """Args: state (gca_execution.Execution.State.RUNNING): Optional. State of this Execution. Defaults to RUNNING. - execution_name (str): - Optional. The resource name of the Execution following the format as follows. - This is globally unique in a metadataStore: + execution_id (str): + Optional. The portion of the Execution name with + the following format, this is globally unique in a metadataStore. projects/123/locations/us-central1/metadataStores//executions/. display_name (str): Optional. The user-defined name of the Execution. @@ -141,10 +139,9 @@ def __init__( Optional. Describes the purpose of the Execution to be created. 
""" extended_metadata = metadata or {} - extended_metadata["resourceName"] = execution_name super(Run, self).__init__( schema_title=self.SCHEMA_TITLE, - resource_name=execution_name, + resource_id=execution_id, state=state, display_name=display_name, schema_version=schema_version, diff --git a/tests/unit/aiplatform/test_metadata_schema.py b/tests/unit/aiplatform/test_metadata_schema.py index 2a09172351..659856b5c5 100644 --- a/tests/unit/aiplatform/test_metadata_schema.py +++ b/tests/unit/aiplatform/test_metadata_schema.py @@ -41,7 +41,6 @@ _TEST_PROJECT = "test-project" _TEST_LOCATION = "us-central1" _TEST_METADATA_STORE = "test-metadata-store" -_TEST_ALT_LOCATION = "europe-west4" _TEST_PARENT = f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}/metadataStores/{_TEST_METADATA_STORE}" # resource attributes @@ -59,10 +58,6 @@ "test-param3": False, } -# context -_TEST_CONTEXT_ID = "test-context-id" -_TEST_CONTEXT_NAME = f"{_TEST_PARENT}/contexts/{_TEST_CONTEXT_ID}" - # artifact _TEST_ARTIFACT_ID = "test-artifact-id" _TEST_ARTIFACT_NAME = f"{_TEST_PARENT}/artifacts/{_TEST_ARTIFACT_ID}" @@ -129,22 +124,20 @@ def test_base_class_default_state(self): artifact = base_artifact.BaseArtifactSchema() assert artifact.state == gca_artifact.Artifact.State.LIVE - def test_base_class_overrides_resouce_id_from_resouce_name(self): - artifact = base_artifact.BaseArtifactSchema(resource_name=_TEST_ARTIFACT_NAME) - assert artifact.resource_id == _TEST_ARTIFACT_ID - def test_base_class_overrides_default_version(self): artifact = base_artifact.BaseArtifactSchema(schema_version=_TEST_SCHEMA_VERSION) assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_base_class_init_remaining_parameters_are_assigned_correctly(self): artifact = base_artifact.BaseArtifactSchema( + resource_id=_TEST_ARTIFACT_ID, schema_title=_TEST_SCHEMA_TITLE, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) + assert artifact.resource_id == _TEST_ARTIFACT_ID assert artifact.schema_title == _TEST_SCHEMA_TITLE assert artifact.uri == _TEST_URI assert artifact.display_name == _TEST_DISPLAY_NAME @@ -200,12 +193,6 @@ def test_base_class_default_state(self): execution = base_execution.BaseExecutionSchema() assert execution.state == gca_execution.Execution.State.RUNNING - def test_base_class_overrides_resouce_id_from_resouce_name(self): - execution = base_execution.BaseExecutionSchema( - resource_name=_TEST_ARTIFACT_NAME - ) - assert execution.resource_id == _TEST_ARTIFACT_ID - def test_base_class_overrides_default_version(self): execution = base_execution.BaseExecutionSchema( schema_version=_TEST_SCHEMA_VERSION @@ -214,12 +201,14 @@ def test_base_class_overrides_default_version(self): def test_base_class_init_remaining_parameters_are_assigned_correctly(self): execution = base_execution.BaseExecutionSchema( + resource_id=_TEST_EXECUTION_ID, schema_title=_TEST_SCHEMA_TITLE, state=_TEST_EXECUTION_STATE, display_name=_TEST_DISPLAY_NAME, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) + assert execution.resource_id == _TEST_EXECUTION_ID assert execution.schema_title == _TEST_SCHEMA_TITLE assert execution.state == _TEST_EXECUTION_STATE assert execution.display_name == _TEST_DISPLAY_NAME @@ -286,21 +275,16 @@ def test_vertex_dataset_schema_title_is_set_correctly(self): artifact = google_artifact_schema.VertexDataset() assert artifact.schema_title == "google.VertexDataset" - def test_vertex_dataset_resouce_name_is_set_in_metadata(self): - artifact = 
google_artifact_schema.VertexDataset( - dataset_name=_TEST_ARTIFACT_NAME - ) - assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME - def test_vertex_dataset_constructor_parameters_are_set_correctly(self): artifact = google_artifact_schema.VertexDataset( - dataset_name=_TEST_ARTIFACT_NAME, + dataset_id=_TEST_ARTIFACT_ID, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) + assert artifact.resource_id == _TEST_ARTIFACT_ID assert artifact.uri == _TEST_URI assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION @@ -311,21 +295,16 @@ def test_vertex_model_schema_title_is_set_correctly(self): artifact = google_artifact_schema.VertexModel() assert artifact.schema_title == "google.VertexModel" - def test_vertex_model_resouce_name_is_set_in_metadata(self): - artifact = google_artifact_schema.VertexModel( - vertex_model_name=_TEST_ARTIFACT_NAME - ) - assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME - def test_vertex_model_constructor_parameters_are_set_correctly(self): artifact = google_artifact_schema.VertexModel( - vertex_model_name=_TEST_ARTIFACT_NAME, + vertex_model_id=_TEST_ARTIFACT_ID, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) + assert artifact.resource_id == _TEST_ARTIFACT_ID assert artifact.uri == _TEST_URI assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION @@ -336,21 +315,16 @@ def test_vertex_endpoint_schema_title_is_set_correctly(self): artifact = google_artifact_schema.VertexEndpoint() assert artifact.schema_title == "google.VertexEndpoint" - def test_vertex_endpoint_resouce_name_is_set_in_metadata(self): - artifact = google_artifact_schema.VertexEndpoint( - vertex_endpoint_name=_TEST_ARTIFACT_NAME - ) - assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME - def test_vertex_endpoint_constructor_parameters_are_set_correctly(self): artifact = google_artifact_schema.VertexEndpoint( - vertex_endpoint_name=_TEST_ARTIFACT_NAME, + vertex_endpoint_id=_TEST_ARTIFACT_ID, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) + assert artifact.resource_id == _TEST_ARTIFACT_ID assert artifact.uri == _TEST_URI assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION @@ -373,23 +347,6 @@ def test_unmanaged_container_model_title_is_set_correctly(self): ) assert artifact.schema_title == "google.UnmanagedContainerModel" - def test_unmanaged_container_model_resouce_name_is_set_in_metadata(self): - predict_schema_ta = utils.PredictSchemata( - instance_schema_uri="instance_uri", - prediction_schema_uri="prediction_uri", - parameters_schema_uri="parameters_uri", - ) - - container_spec = utils.ContainerSpec( - image_uri="gcr.io/test_container_image_uri" - ) - artifact = google_artifact_schema.UnmanagedContainerModel( - predict_schema_ta=predict_schema_ta, - container_spec=container_spec, - unmanaged_container_model_name=_TEST_ARTIFACT_NAME, - ) - assert artifact.metadata["resourceName"] == _TEST_ARTIFACT_NAME - def test_unmanaged_container_model_constructor_parameters_are_set_correctly(self): predict_schema_ta = utils.PredictSchemata( instance_schema_uri="instance_uri", @@ -404,13 +361,14 @@ def 
test_unmanaged_container_model_constructor_parameters_are_set_correctly(self artifact = google_artifact_schema.UnmanagedContainerModel( predict_schema_ta=predict_schema_ta, container_spec=container_spec, - unmanaged_container_model_name=_TEST_ARTIFACT_NAME, + unmanaged_container_model_id=_TEST_ARTIFACT_ID, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) + assert artifact.resource_id == _TEST_ARTIFACT_ID assert artifact.uri == _TEST_URI assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION From 4a2a3fc5946cb07e1b8ae37a0e79685ecce6a424 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Fri, 17 Jun 2022 14:35:07 +0000 Subject: [PATCH 28/48] fix e2e tests --- tests/system/aiplatform/test_e2e_metadata_schema.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/tests/system/aiplatform/test_e2e_metadata_schema.py b/tests/system/aiplatform/test_e2e_metadata_schema.py index b42b4cdf4b..807b8712ce 100644 --- a/tests/system/aiplatform/test_e2e_metadata_schema.py +++ b/tests/system/aiplatform/test_e2e_metadata_schema.py @@ -24,15 +24,18 @@ from google.cloud.aiplatform.metadata.schema import google_artifact_schema from google.cloud.aiplatform.metadata.schema import system_artifact_schema from google.cloud.aiplatform.metadata.schema import system_execution_schema - from tests.system.aiplatform import e2e_base @pytest.mark.usefixtures("tear_down_resources") class TestMetadataSchema(e2e_base.TestEndToEnd): + + _temp_prefix = "tmpvrtxmlmdsdk-e2e" + def setup_class(cls): # Truncating the name because of resource id constraints from the service cls.artifact_display_name = cls._make_display_name("base-artifact")[:30] + cls.artifact_id = cls._make_display_name("base-artifact-id")[:30] cls.artifact_uri = cls._make_display_name("base-uri") cls.artifact_metadata = {"test_property": "test_value"} cls.artifact_description = cls._make_display_name("base-description") @@ -81,23 +84,19 @@ def test_system_dataset_artifact_create(self): def test_google_dataset_artifact_create(self): - # Truncating the name because of resource id constraints from the service - dataset_name = f"projects/{e2e_base._PROJECT}/locations/{e2e_base._LOCATION}/datasets/{self.artifact_display_name}" - aiplatform.init( project=e2e_base._PROJECT, location=e2e_base._LOCATION, ) artifact = google_artifact_schema.VertexDataset( - dataset_name=dataset_name, + dataset_name=self.artifact_id, display_name=self.artifact_display_name, uri=self.artifact_uri, metadata=self.artifact_metadata, description=self.artifact_description, ).create() expected_metadata = self.artifact_metadata - expected_metadata["resourceName"] = dataset_name assert artifact.display_name == self.artifact_display_name assert json.dumps(artifact.metadata) == json.dumps(expected_metadata) From 6d89efc0583ec801a24fa0962955f02507029ebe Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Tue, 21 Jun 2022 19:52:37 +0000 Subject: [PATCH 29/48] change google and system to sub folders of schema --- .../artifact_schema.py} | 0 .../artifact_schema.py} | 0 .../execution_schema.py} | 0 tests/system/aiplatform/test_e2e_metadata_schema.py | 12 +++++++++--- tests/unit/aiplatform/test_metadata_schema.py | 12 +++++++++--- 5 files changed, 18 insertions(+), 6 deletions(-) rename google/cloud/aiplatform/metadata/schema/{google_artifact_schema.py => google/artifact_schema.py} (100%) rename 
google/cloud/aiplatform/metadata/schema/{system_artifact_schema.py => system/artifact_schema.py} (100%) rename google/cloud/aiplatform/metadata/schema/{system_execution_schema.py => system/execution_schema.py} (100%) diff --git a/google/cloud/aiplatform/metadata/schema/google_artifact_schema.py b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py similarity index 100% rename from google/cloud/aiplatform/metadata/schema/google_artifact_schema.py rename to google/cloud/aiplatform/metadata/schema/google/artifact_schema.py diff --git a/google/cloud/aiplatform/metadata/schema/system_artifact_schema.py b/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py similarity index 100% rename from google/cloud/aiplatform/metadata/schema/system_artifact_schema.py rename to google/cloud/aiplatform/metadata/schema/system/artifact_schema.py diff --git a/google/cloud/aiplatform/metadata/schema/system_execution_schema.py b/google/cloud/aiplatform/metadata/schema/system/execution_schema.py similarity index 100% rename from google/cloud/aiplatform/metadata/schema/system_execution_schema.py rename to google/cloud/aiplatform/metadata/schema/system/execution_schema.py diff --git a/tests/system/aiplatform/test_e2e_metadata_schema.py b/tests/system/aiplatform/test_e2e_metadata_schema.py index 807b8712ce..99925c68eb 100644 --- a/tests/system/aiplatform/test_e2e_metadata_schema.py +++ b/tests/system/aiplatform/test_e2e_metadata_schema.py @@ -21,9 +21,15 @@ from google.cloud import aiplatform from google.cloud.aiplatform.metadata.schema import base_artifact from google.cloud.aiplatform.metadata.schema import base_execution -from google.cloud.aiplatform.metadata.schema import google_artifact_schema -from google.cloud.aiplatform.metadata.schema import system_artifact_schema -from google.cloud.aiplatform.metadata.schema import system_execution_schema +from google.cloud.aiplatform.metadata.schema.google import ( + artifact_schema as google_artifact_schema, +) +from google.cloud.aiplatform.metadata.schema.system import ( + artifact_schema as system_artifact_schema, +) +from google.cloud.aiplatform.metadata.schema.system import ( + execution_schema as system_execution_schema, +) from tests.system.aiplatform import e2e_base diff --git a/tests/unit/aiplatform/test_metadata_schema.py b/tests/unit/aiplatform/test_metadata_schema.py index 659856b5c5..6786dd3ab4 100644 --- a/tests/unit/aiplatform/test_metadata_schema.py +++ b/tests/unit/aiplatform/test_metadata_schema.py @@ -28,9 +28,15 @@ from google.cloud.aiplatform.metadata import metadata from google.cloud.aiplatform.metadata.schema import base_artifact from google.cloud.aiplatform.metadata.schema import base_execution -from google.cloud.aiplatform.metadata.schema import google_artifact_schema -from google.cloud.aiplatform.metadata.schema import system_artifact_schema -from google.cloud.aiplatform.metadata.schema import system_execution_schema +from google.cloud.aiplatform.metadata.schema.google import ( + artifact_schema as google_artifact_schema, +) +from google.cloud.aiplatform.metadata.schema.system import ( + artifact_schema as system_artifact_schema, +) +from google.cloud.aiplatform.metadata.schema.system import ( + execution_schema as system_execution_schema, +) from google.cloud.aiplatform.metadata.schema import utils from google.cloud.aiplatform_v1 import MetadataServiceClient from google.cloud.aiplatform_v1 import Artifact as GapicArtifact From 53ba40b8bef1eb543dc7698124342c7fba2a4d32 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Wed, 
22 Jun 2022 12:20:30 +0000 Subject: [PATCH 30/48] use create_from_base_execution_schema instead of overloading create --- google/cloud/aiplatform/metadata/artifact.py | 2 +- google/cloud/aiplatform/metadata/execution.py | 90 +++++++++++++------ google/cloud/aiplatform/metadata/metadata.py | 2 +- .../metadata/schema/base_artifact.py | 2 +- .../metadata/schema/base_execution.py | 2 +- 5 files changed, 65 insertions(+), 33 deletions(-) diff --git a/google/cloud/aiplatform/metadata/artifact.py b/google/cloud/aiplatform/metadata/artifact.py index 9519a816eb..bb31f7c855 100644 --- a/google/cloud/aiplatform/metadata/artifact.py +++ b/google/cloud/aiplatform/metadata/artifact.py @@ -325,7 +325,7 @@ def create( ) @classmethod - def create_from_base_schema( + def create_from_base_artifact_schema( cls, *, base_artifact_schema: base_artifact.BaseArtifactSchema, diff --git a/google/cloud/aiplatform/metadata/execution.py b/google/cloud/aiplatform/metadata/execution.py index a8f8fb8dba..5257a6b59b 100644 --- a/google/cloud/aiplatform/metadata/execution.py +++ b/google/cloud/aiplatform/metadata/execution.py @@ -90,8 +90,8 @@ def state(self) -> gca_execution.Execution.State: @classmethod def create( cls, + schema_title: str, *, - schema_title: Optional[str] = None, state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, resource_id: Optional[str] = None, display_name: Optional[str] = None, @@ -102,15 +102,12 @@ def create( project: Optional[str] = None, location: Optional[str] = None, credentials=Optional[auth_credentials.Credentials], - base_execution_schema: Optional[base_execution.BaseExecutionSchema] = None, ) -> "Execution": """ Creates a new Metadata Execution. - Args: schema_title (str): - Optional. schema_title identifies the schema title used by the Execution. - Either schema_title or base_execution_schema must be provided. + Required. schema_title identifies the schema title used by the Execution. state (gca_execution.Execution.State.RUNNING): Optional. State of this Execution. Defaults to RUNNING. resource_id (str): @@ -140,38 +137,14 @@ def create( credentials (auth_credentials.Credentials): Optional. Custom credentials used to create this Execution. Overrides credentials set in aiplatform.init. - base_execution_schema (BaseExecutionSchema): - Optional. An instance of the BaseExecutionSchema class that can be provided instead of providing schema specific parameters. It overrides - the values provided for schema_title, resource_id, state, display_name, schema_version, description, and metadata. - Returns: Execution: Instantiated representation of the managed Metadata Execution. 
- """ self = cls._empty_constructor( project=project, location=location, credentials=credentials ) super(base.VertexAiResourceNounWithFutureManager, self).__init__() - if base_execution_schema: - resource = Execution._create_resource( - client=self.api_client, - parent=metadata_store._MetadataStore._format_resource_name( - project=self.project, - location=self.location, - metadata_store=metadata_store_id, - ), - schema_title=base_execution_schema.schema_title, - resource_id=base_execution_schema.resource_id, - metadata=base_execution_schema.metadata, - description=base_execution_schema.description, - display_name=base_execution_schema.display_name, - schema_version=base_execution_schema.schema_version, - state=base_execution_schema.state, - ) - self._gca_resource = resource - return self - resource = Execution._create_resource( client=self.api_client, parent=metadata_store._MetadataStore._format_resource_name( @@ -191,6 +164,65 @@ def create( return self + @classmethod + def create_from_base_execution_schema( + cls, + *, + metadata_store_id: str = "default", + project: Optional[str] = None, + location: Optional[str] = None, + credentials=Optional[auth_credentials.Credentials], + base_execution_schema: Optional[base_execution.BaseExecutionSchema] = None, + ) -> "Execution": + """ + Creates a new Metadata Execution. + + Args: + base_execution_schema (BaseExecutionSchema): + Optional. An instance of the BaseExecutionSchema class that can + be provided instead of providing schema specific parameters. + metadata_store_id (str): + Optional. The portion of the resource name with + the format: + projects/123/locations/us-central1/metadataStores//artifacts/ + If not provided, the MetadataStore's ID will be set to "default". + project (str): + Optional. Project used to create this Execution. Overrides project set in + aiplatform.init. + location (str): + Optional. Location used to create this Execution. Overrides location set in + aiplatform.init. + credentials (auth_credentials.Credentials): + Optional. Custom credentials used to create this Execution. Overrides + credentials set in aiplatform.init. + + Returns: + Execution: Instantiated representation of the managed Metadata Execution. 
+ + """ + self = cls._empty_constructor( + project=project, location=location, credentials=credentials + ) + super(base.VertexAiResourceNounWithFutureManager, self).__init__() + + resource = Execution._create_resource( + client=self.api_client, + parent=metadata_store._MetadataStore._format_resource_name( + project=self.project, + location=self.location, + metadata_store=metadata_store_id, + ), + schema_title=base_execution_schema.schema_title, + resource_id=base_execution_schema.resource_id, + metadata=base_execution_schema.metadata, + description=base_execution_schema.description, + display_name=base_execution_schema.display_name, + schema_version=base_execution_schema.schema_version, + state=base_execution_schema.state, + ) + self._gca_resource = resource + return self + def __enter__(self): if self.state is not gca_execution.Execution.State.RUNNING: self.update(state=gca_execution.Execution.State.RUNNING) diff --git a/google/cloud/aiplatform/metadata/metadata.py b/google/cloud/aiplatform/metadata/metadata.py index 1a00b39612..ed4578fc05 100644 --- a/google/cloud/aiplatform/metadata/metadata.py +++ b/google/cloud/aiplatform/metadata/metadata.py @@ -612,7 +612,7 @@ def start_execution( run_execution.update(state=gca_execution.Execution.State.RUNNING) else: if base_execution_schema: - run_execution = execution.Execution.create( + run_execution = execution.Execution.create_from_base_execution_schema( base_execution_schema=base_execution_schema, metadata_store_id=metadata_store_id, project=project, diff --git a/google/cloud/aiplatform/metadata/schema/base_artifact.py b/google/cloud/aiplatform/metadata/schema/base_artifact.py index be76f61ec8..b18f23da31 100644 --- a/google/cloud/aiplatform/metadata/schema/base_artifact.py +++ b/google/cloud/aiplatform/metadata/schema/base_artifact.py @@ -115,7 +115,7 @@ def create( Returns: Artifact: Instantiated representation of the managed Metadata Artifact. """ - return artifact.Artifact.create_from_base_schema( + return artifact.Artifact.create_from_base_artifact_schema( base_artifact_schema=self, metadata_store_id=metadata_store_id, project=project, diff --git a/google/cloud/aiplatform/metadata/schema/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py index 5e49b1b63f..0fdf320349 100644 --- a/google/cloud/aiplatform/metadata/schema/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -103,7 +103,7 @@ def create( Execution: Instantiated representation of the managed Metadata Execution. """ - self.execution = execution.Execution.create( + self.execution = execution.Execution.create_from_base_execution_schema( base_execution_schema=self, metadata_store_id=metadata_store_id, project=project, From 1a8931a367488a3eb1afd52771bdd339242e9a31 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Wed, 22 Jun 2022 12:22:17 +0000 Subject: [PATCH 31/48] update api docs --- google/cloud/aiplatform/metadata/artifact.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/google/cloud/aiplatform/metadata/artifact.py b/google/cloud/aiplatform/metadata/artifact.py index bb31f7c855..c65c4a3454 100644 --- a/google/cloud/aiplatform/metadata/artifact.py +++ b/google/cloud/aiplatform/metadata/artifact.py @@ -338,8 +338,8 @@ def create_from_base_artifact_schema( Args: base_artifact_schema (BaseArtifactSchema): - Required. An instance of the BaseArtifactType class that can be provided instead of providing artifact specific parameters. 
It overrides - the values provided for schema_title, resource_id, uri, display_name, schema_version, description, and metadata. + Required. An instance of the BaseArtifactType class that can be + provided instead of providing artifact specific parameters. metadata_store_id (str): Optional. The portion of the resource name with the format: From 026581695d3f3351844a6a67abf8834135a91116 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Wed, 22 Jun 2022 12:55:25 +0000 Subject: [PATCH 32/48] update docstring formatting --- google/cloud/aiplatform/metadata/artifact.py | 5 +- google/cloud/aiplatform/metadata/execution.py | 9 ++- google/cloud/aiplatform/metadata/metadata.py | 2 +- .../metadata/schema/base_artifact.py | 2 +- .../cloud/aiplatform/metadata/schema/utils.py | 67 ++++++++++++++++--- 5 files changed, 69 insertions(+), 16 deletions(-) diff --git a/google/cloud/aiplatform/metadata/artifact.py b/google/cloud/aiplatform/metadata/artifact.py index c65c4a3454..590a93717f 100644 --- a/google/cloud/aiplatform/metadata/artifact.py +++ b/google/cloud/aiplatform/metadata/artifact.py @@ -15,7 +15,6 @@ # limitations under the License. # - from typing import Optional, Dict, Union import proto @@ -35,6 +34,7 @@ from google.cloud.aiplatform.metadata.schema import base_artifact from google.cloud.aiplatform.utils import rest_utils + _LOGGER = base.Logger(__name__) @@ -265,9 +265,11 @@ def create( credentials: Optional[auth_credentials.Credentials] = None, ) -> "Artifact": """Creates a new Metadata Artifact. + Args: schema_title (str): Required. schema_title identifies the schema title used by the Artifact. + Please reference https://cloud.google.com/vertex-ai/docs/ml-metadata/system-schemas. resource_id (str): Optional. The portion of the Artifact name with @@ -306,6 +308,7 @@ def create( credentials (auth_credentials.Credentials): Optional. Custom credentials used to create this Artifact. Overrides credentials set in aiplatform.init. + Returns: Artifact: Instantiated representation of the managed Metadata Artifact. """ diff --git a/google/cloud/aiplatform/metadata/execution.py b/google/cloud/aiplatform/metadata/execution.py index 5257a6b59b..6915acee90 100644 --- a/google/cloud/aiplatform/metadata/execution.py +++ b/google/cloud/aiplatform/metadata/execution.py @@ -105,6 +105,7 @@ def create( ) -> "Execution": """ Creates a new Metadata Execution. + Args: schema_title (str): Required. schema_title identifies the schema title used by the Execution. @@ -137,8 +138,10 @@ def create( credentials (auth_credentials.Credentials): Optional. Custom credentials used to create this Execution. Overrides credentials set in aiplatform.init. + Returns: Execution: Instantiated representation of the managed Metadata Execution. + """ self = cls._empty_constructor( project=project, location=location, credentials=credentials @@ -168,11 +171,11 @@ def create( def create_from_base_execution_schema( cls, *, - metadata_store_id: str = "default", + base_execution_schema: Optional[base_execution.BaseExecutionSchema] = None, + metadata_store_id: Optional[str] = "default", project: Optional[str] = None, location: Optional[str] = None, - credentials=Optional[auth_credentials.Credentials], - base_execution_schema: Optional[base_execution.BaseExecutionSchema] = None, + credentials: Optional[auth_credentials.Credentials] = None, ) -> "Execution": """ Creates a new Metadata Execution. 
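
For orientation between the diffs, a minimal, illustrative usage sketch (not part of the patch itself) of the flow these changes converge on: a schema object from the system/google subpackages is built first, then materialized as a managed Metadata resource either via the schema's own create() helper or via the new create_from_base_*_schema classmethods. The project, bucket, and display-name values below are placeholders.

# Illustrative sketch only -- assumes the schema classes and classmethods
# introduced across this patch series; identifiers such as "my-project"
# and "gs://my-bucket/my-dataset" are hypothetical.
from google.cloud import aiplatform
from google.cloud.aiplatform.metadata import execution
from google.cloud.aiplatform.metadata.schema.system import (
    artifact_schema as system_artifact_schema,
)
from google.cloud.aiplatform.metadata.schema.system import (
    execution_schema as system_execution_schema,
)

aiplatform.init(project="my-project", location="us-central1")

# Artifact side: the schema object describes the artifact, and create()
# materializes it as a managed Metadata Artifact.
dataset_artifact = system_artifact_schema.Dataset(
    uri="gs://my-bucket/my-dataset",
    display_name="training data",
).create()

# Execution side: either call create() on the schema object, or hand the
# schema to Execution.create_from_base_execution_schema directly.
run_schema = system_execution_schema.ContainerExecution(
    display_name="training step",
)
run = execution.Execution.create_from_base_execution_schema(
    base_execution_schema=run_schema,
    metadata_store_id="default",
)
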
diff --git a/google/cloud/aiplatform/metadata/metadata.py b/google/cloud/aiplatform/metadata/metadata.py index ed4578fc05..fe2342a353 100644 --- a/google/cloud/aiplatform/metadata/metadata.py +++ b/google/cloud/aiplatform/metadata/metadata.py @@ -31,8 +31,8 @@ from google.cloud.aiplatform.metadata import execution from google.cloud.aiplatform.metadata import experiment_resources from google.cloud.aiplatform.metadata import experiment_run_resource -from google.cloud.aiplatform.tensorboard import tensorboard_resource from google.cloud.aiplatform.metadata.schema import base_execution +from google.cloud.aiplatform.tensorboard import tensorboard_resource _LOGGER = base.Logger(__name__) diff --git a/google/cloud/aiplatform/metadata/schema/base_artifact.py b/google/cloud/aiplatform/metadata/schema/base_artifact.py index b18f23da31..8ee2aebc1a 100644 --- a/google/cloud/aiplatform/metadata/schema/base_artifact.py +++ b/google/cloud/aiplatform/metadata/schema/base_artifact.py @@ -38,7 +38,7 @@ class BaseArtifactSchema(object): Optional. The schema title used by the Artifact, defaults to "system.Artifact" resource_id (str): Optional. The portion of the Artifact name with - the format. This is globally unique in a metadataStore: + the following format, this is globally unique in a metadataStore: projects/123/locations/us-central1/metadataStores//artifacts/. uri (str): Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual diff --git a/google/cloud/aiplatform/metadata/schema/utils.py b/google/cloud/aiplatform/metadata/schema/utils.py index 4269f0d10c..1b71c509e5 100644 --- a/google/cloud/aiplatform/metadata/schema/utils.py +++ b/google/cloud/aiplatform/metadata/schema/utils.py @@ -24,11 +24,23 @@ class PredictSchemata: Args: instance_schema_uri (str): - Required. Points to a YAML file stored on Google Cloud Storage describing the format of a single instance, which are used in PredictRequest.instances, ExplainRequest.instances and BatchPredictionJob.input_config. The schema is defined as an OpenAPI 3.0.2 `Schema Object. + Required. Points to a YAML file stored on Google Cloud Storage + describing the format of a single instance, which are used in + PredictRequest.instances, ExplainRequest.instances and + BatchPredictionJob.input_config. The schema is defined as an + OpenAPI 3.0.2 `Schema Object. parameters_schema_uri (str): - Required. Points to a YAML file stored on Google Cloud Storage describing the parameters of prediction and explanation via PredictRequest.parameters, ExplainRequest.parameters and BatchPredictionJob.model_parameters. The schema is defined as an OpenAPI 3.0.2 `Schema Object. + Required. Points to a YAML file stored on Google Cloud Storage + describing the parameters of prediction and explanation via + PredictRequest.parameters, ExplainRequest.parameters and + BatchPredictionJob.model_parameters. The schema is defined as an + OpenAPI 3.0.2 `Schema Object. prediction_schema_uri (str): - Required. Points to a YAML file stored on Google Cloud Storage describing the format of a single prediction produced by this Model, which are returned via PredictResponse.predictions, ExplainResponse.explanations, and BatchPredictionJob.output_config. The schema is defined as an OpenAPI 3.0.2 `Schema Object. + Required. 
Points to a YAML file stored on Google Cloud Storage + describing the format of a single prediction produced by this Model + , which are returned via PredictResponse.predictions, + ExplainResponse.explanations, and BatchPredictionJob.output_config. + The schema is defined as an OpenAPI 3.0.2 `Schema Object. """ instance_schema_uri: str @@ -50,19 +62,54 @@ class ContainerSpec: """Container configuration for the model. Args: image_uri (str): - Required. URI of the Docker image to be used as the custom container for serving predictions. This URI must identify an image in Artifact Registry or Container Registry. Learn more about the `container publishing requirements + Required. URI of the Docker image to be used as the custom + container for serving predictions. This URI must identify an image + in Artifact Registry or Container Registry. command (Sequence[str]): - Optional. Specifies the command that runs when the container starts. This overrides the container's `ENTRYPOINT + Optional. Specifies the command that runs when the container + starts. This overrides the container's `ENTRYPOINT`. args (Sequence[str]): - Optional. Specifies arguments for the command that runs when the container starts. This overrides the container's ```CMD`` + Optional. Specifies arguments for the command that runs when the + container starts. This overrides the container's `CMD` env (Sequence[google.cloud.aiplatform_v1.types.EnvVar]): - Optional. List of environment variables to set in the container. After the container starts running, code running in the container can read these environment variables. Additionally, the command and args fields can reference these variables. Later entries in this list can also reference earlier entries. For example, the following example sets the variable ``VAR_2`` to have the value ``foo bar``: .. code:: json [ { "name": "VAR_1", "value": "foo" }, { "name": "VAR_2", "value": "$(VAR_1) bar" } ] If you switch the order of the variables in the example, then the expansion does not occur. This field corresponds to the ``env`` field of the Kubernetes Containers `v1 core API. + Optional. List of environment variables to set in the container. + After the container starts running, code running in the container + can read these environment variables. Additionally, the command + and args fields can reference these variables. Later entries in + this list can also reference earlier entries. For example, the + following example sets the variable ``VAR_2`` to have the value + ``foo bar``: .. code:: json [ { "name": "VAR_1", "value": "foo" }, + { "name": "VAR_2", "value": "$(VAR_1) bar" } ] If you switch the + order of the variables in the example, then the expansion does not + occur. This field corresponds to the ``env`` field of the + Kubernetes Containers `v1 core API. ports (Sequence[google.cloud.aiplatform_v1.types.Port]): - Optional. List of ports to expose from the container. Vertex AI sends any prediction requests that it receives to the first port on this list. Vertex AI also sends `liveness and health checks. + Optional. List of ports to expose from the container. Vertex AI + sends any prediction requests that it receives to the first port on + this list. Vertex AI also sends `liveness and health checks. predict_route (str): - Optional. HTTP path on the container to send prediction requests to. Vertex AI forwards requests sent using projects.locations.endpoints.predict to this path on the container's IP address and port. 
Vertex AI then returns the container's response in the API response. For example, if you set this field to ``/foo``, then when Vertex AI receives a prediction request, it forwards the request body in a POST request to the ``/foo`` path on the port of your container specified by the first value of this ``ModelContainerSpec``'s ports field. If you don't specify this field, it defaults to the following value when you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1.EndpointService.DeployModel]: /v1/endpoints/ENDPOINT/deployedModels/DEPLOYED_MODEL:predict The placeholders in this value are replaced as follows: - ENDPOINT: The last segment (following ``endpoints/``)of the Endpoint.name][] field of the Endpoint where this Model has been deployed. (Vertex AI makes this value available to your container code as the ```AIP_ENDPOINT_ID`` environment variable + Optional. HTTP path on the container to send prediction requests + to. Vertex AI forwards requests sent using + projects.locations.endpoints.predict to this path on the + container's IP address and port. Vertex AI then returns the + container's response in the API response. For example, if you set + this field to ``/foo``, then when Vertex AI receives a prediction + request, it forwards the request body in a POST request to the + ``/foo`` path on the port of your container specified by the first + value of this ``ModelContainerSpec``'s ports field. If you don't + specify this field, it defaults to the following value when you + deploy this Model to an Endpoint + /v1/endpoints/ENDPOINT/deployedModels/DEPLOYED_MODEL:predict + The placeholders in this value are replaced as follows: + - ENDPOINT: The last segment (following ``endpoints/``)of the + Endpoint.name][] field of the Endpoint where this Model has + been deployed. (Vertex AI makes this value available to your + container code as the ```AIP_ENDPOINT_ID`` environment variable health_route (str): - Optional. HTTP path on the container to send health checks to. Vertex AI intermittently sends GET requests to this path on the container's IP address and port to check that the container is healthy. Read more about `health checks + Optional. HTTP path on the container to send health checks to. + Vertex AI intermittently sends GET requests to this path on the + container's IP address and port to check that the container is + healthy. 
Read more about `health checks display_name (str): """ From 59afcd63dc4e89e9b834665c6ea25505182c9dda Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Wed, 22 Jun 2022 15:40:59 -0700 Subject: [PATCH 33/48] Update google/cloud/aiplatform/metadata/metadata.py Co-authored-by: sasha-gitg <44654632+sasha-gitg@users.noreply.github.com> --- google/cloud/aiplatform/metadata/metadata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/aiplatform/metadata/metadata.py b/google/cloud/aiplatform/metadata/metadata.py index fe2342a353..5a634418cf 100644 --- a/google/cloud/aiplatform/metadata/metadata.py +++ b/google/cloud/aiplatform/metadata/metadata.py @@ -622,7 +622,7 @@ def start_execution( else: if not schema_title: raise ValueError( - "schema_title must be provided when starting a new Execution" + "schema_title or base_execution_schema must be provided when starting a new Execution" ) run_execution = execution.Execution.create( From 1e31adfec37fa40d558871f6ecdad9319eee71d9 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Wed, 22 Jun 2022 23:09:49 +0000 Subject: [PATCH 34/48] add return types and move args to constructor --- .../metadata/schema/base_artifact.py | 75 +++++++++---------- .../metadata/schema/base_execution.py | 53 +++++++------ .../metadata/schema/google/artifact_schema.py | 1 - 3 files changed, 62 insertions(+), 67 deletions(-) diff --git a/google/cloud/aiplatform/metadata/schema/base_artifact.py b/google/cloud/aiplatform/metadata/schema/base_artifact.py index 8ee2aebc1a..3d4f699757 100644 --- a/google/cloud/aiplatform/metadata/schema/base_artifact.py +++ b/google/cloud/aiplatform/metadata/schema/base_artifact.py @@ -25,43 +25,8 @@ class BaseArtifactSchema(object): - """Base class for Metadata Artifact types. + """Base class for Metadata Artifact types.""" - This is the base class for defining various artifact types, which can be - passed to google.Artifact to create a corresponding resource. - Artifacts carry a `metadata` field, which is a dictionary for storing - metadata related to this artifact. Subclasses from ArtifactType can enforce - various structure and field requirements for the metadata field. - - Args: - schema_title (str): - Optional. The schema title used by the Artifact, defaults to "system.Artifact" - resource_id (str): - Optional. The portion of the Artifact name with - the following format, this is globally unique in a metadataStore: - projects/123/locations/us-central1/metadataStores//artifacts/. - uri (str): - Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual - artifact file. - display_name (str): - Optional. The user-defined name of the Artifact. - schema_version (str): - Optional. schema_version specifies the version used by the Artifact. - If not set, defaults to use the latest version. - description (str): - Optional. Describes the purpose of the Artifact to be created. - metadata (Dict): - Optional. Contains the metadata information that will be stored in the Artifact. - state (google.cloud.gapic.types.Artifact.State): - Optional. The state of this Artifact. This is a - property of the Artifact, and does not imply or - capture any ongoing process. This property is - managed by clients (such as Vertex AI - Pipelines), and the system does not prescribe or - check the validity of state transitions. 
- """ - - ARTIFACT_PROPERTY_KEY_RESOURCE_NAME = "resourceName" SCHEMA_TITLE = "system.Artifact" def __init__( @@ -76,7 +41,41 @@ def __init__( state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, ): - """Initializes the Artifact with the given name, URI and metadata.""" + """Initializes the Artifact with the given name, URI and metadata. + + This is the base class for defining various artifact types, which can be + passed to google.Artifact to create a corresponding resource. + Artifacts carry a `metadata` field, which is a dictionary for storing + metadata related to this artifact. Subclasses from ArtifactType can enforce + various structure and field requirements for the metadata field. + + Args: + schema_title (str): + Optional. The schema title used by the Artifact, defaults to "system.Artifact" + resource_id (str): + Optional. The portion of the Artifact name with + the following format, this is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. + uri (str): + Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual + artifact file. + display_name (str): + Optional. The user-defined name of the Artifact. + schema_version (str): + Optional. schema_version specifies the version used by the Artifact. + If not set, defaults to use the latest version. + description (str): + Optional. Describes the purpose of the Artifact to be created. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Artifact. + state (google.cloud.gapic.types.Artifact.State): + Optional. The state of this Artifact. This is a + property of the Artifact, and does not imply or + capture any ongoing process. This property is + managed by clients (such as Vertex AI + Pipelines), and the system does not prescribe or + check the validity of state transitions. + """ self.schema_title = BaseArtifactSchema.SCHEMA_TITLE if schema_title: self.schema_title = schema_title @@ -94,7 +93,7 @@ def create( project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, - ): + ) -> artifact.Artifact: """Creates a new Metadata Artifact. Args: diff --git a/google/cloud/aiplatform/metadata/schema/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py index 0fdf320349..6627b5c87c 100644 --- a/google/cloud/aiplatform/metadata/schema/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -26,31 +26,8 @@ class BaseExecutionSchema(object): - """Base class for Metadata Execution schema. - - This is the base class for defining various execution types. - - Args: - schema_title (str): - Required. schema_title identifies the schema title used by the Execution. - state (gca_execution.Execution.State.RUNNING): - Optional. State of this Execution. Defaults to RUNNING. - resource_id (str): - Optional. The portion of the Execution name with - the following format, this is globally unique in a metadataStore. - projects/123/locations/us-central1/metadataStores//executions/. - display_name (str): - Optional. The user-defined name of the Execution. - schema_version (str): - Optional. schema_version specifies the version used by the Execution. - If not set, defaults to use the latest version. - metadata (Dict): - Optional. Contains the metadata information that will be stored in the Execution. - description (str): - Optional. Describes the purpose of the Execution to be created. 
- """ - - ARTIFACT_PROPERTY_KEY_RESOURCE_NAME = "resourceName" + """Base class for Metadata Execution schema.""" + SCHEMA_TITLE = "system.ContainerExecution" def __init__( @@ -64,7 +41,27 @@ def __init__( description: Optional[str] = None, ): - """Initializes the Execution with the given name, URI and metadata.""" + """Initializes the Execution with the given name, URI and metadata. + + Args: + schema_title (str): + Required. schema_title identifies the schema title used by the Execution. + state (gca_execution.Execution.State.RUNNING): + Optional. State of this Execution. Defaults to RUNNING. + resource_id (str): + Optional. The portion of the Execution name with + the following format, this is globally unique in a metadataStore. + projects/123/locations/us-central1/metadataStores//executions/. + display_name (str): + Optional. The user-defined name of the Execution. + schema_version (str): + Optional. schema_version specifies the version used by the Execution. + If not set, defaults to use the latest version. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Execution. + description (str): + Optional. Describes the purpose of the Execution to be created. + """ self.schema_title = BaseExecutionSchema.SCHEMA_TITLE if schema_title: self.schema_title = schema_title @@ -81,7 +78,7 @@ def create( project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, - ): + ) -> execution.Execution: """Creates a new Metadata Execution. Args: @@ -118,7 +115,7 @@ def start_execution( project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, - ): + ) -> execution.Execution: """Create and starts a new Metadata Execution. Args: diff --git a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py index b30cf5751b..969839de22 100644 --- a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py @@ -116,7 +116,6 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. 
""" - extended_metadata = metadata or {} super(VertexModel, self).__init__( From c43fcbfc6e412276bfd543e20ce29ad08cc4c29e Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 23 Jun 2022 00:04:16 +0000 Subject: [PATCH 35/48] using forward reference for parameter and return types to resolve circular import error --- google/cloud/aiplatform/metadata/artifact.py | 2 +- google/cloud/aiplatform/metadata/execution.py | 6 +++--- google/cloud/aiplatform/metadata/metadata.py | 2 +- google/cloud/aiplatform/metadata/schema/base_artifact.py | 2 +- google/cloud/aiplatform/metadata/schema/base_execution.py | 4 ++-- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/google/cloud/aiplatform/metadata/artifact.py b/google/cloud/aiplatform/metadata/artifact.py index 590a93717f..975f7af933 100644 --- a/google/cloud/aiplatform/metadata/artifact.py +++ b/google/cloud/aiplatform/metadata/artifact.py @@ -331,7 +331,7 @@ def create( def create_from_base_artifact_schema( cls, *, - base_artifact_schema: base_artifact.BaseArtifactSchema, + base_artifact_schema: "base_artifact.BaseArtifactSchema", metadata_store_id: Optional[str] = "default", project: Optional[str] = None, location: Optional[str] = None, diff --git a/google/cloud/aiplatform/metadata/execution.py b/google/cloud/aiplatform/metadata/execution.py index 6915acee90..bb5592bc22 100644 --- a/google/cloud/aiplatform/metadata/execution.py +++ b/google/cloud/aiplatform/metadata/execution.py @@ -170,8 +170,8 @@ def create( @classmethod def create_from_base_execution_schema( cls, + base_execution_schema: "base_execution.BaseExecutionSchema", *, - base_execution_schema: Optional[base_execution.BaseExecutionSchema] = None, metadata_store_id: Optional[str] = "default", project: Optional[str] = None, location: Optional[str] = None, @@ -182,8 +182,8 @@ def create_from_base_execution_schema( Args: base_execution_schema (BaseExecutionSchema): - Optional. An instance of the BaseExecutionSchema class that can - be provided instead of providing schema specific parameters. + An instance of the BaseExecutionSchema class that can be + provided instead of providing schema specific parameters. metadata_store_id (str): Optional. The portion of the resource name with the format: diff --git a/google/cloud/aiplatform/metadata/metadata.py b/google/cloud/aiplatform/metadata/metadata.py index 5a634418cf..30bf5e5f7a 100644 --- a/google/cloud/aiplatform/metadata/metadata.py +++ b/google/cloud/aiplatform/metadata/metadata.py @@ -518,7 +518,7 @@ def start_execution( project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, - base_execution_schema: Optional[base_execution.BaseExecutionSchema] = None, + base_execution_schema: Optional["base_execution.BaseExecutionSchema"] = None, ) -> execution.Execution: """ Create and starts a new Metadata Execution or resumes a previously created Execution. diff --git a/google/cloud/aiplatform/metadata/schema/base_artifact.py b/google/cloud/aiplatform/metadata/schema/base_artifact.py index 3d4f699757..2f1eb1c0ff 100644 --- a/google/cloud/aiplatform/metadata/schema/base_artifact.py +++ b/google/cloud/aiplatform/metadata/schema/base_artifact.py @@ -93,7 +93,7 @@ def create( project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, - ) -> artifact.Artifact: + ) -> "artifact.Artifact": """Creates a new Metadata Artifact. 
Args: diff --git a/google/cloud/aiplatform/metadata/schema/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py index 6627b5c87c..adb4a5c770 100644 --- a/google/cloud/aiplatform/metadata/schema/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -78,7 +78,7 @@ def create( project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, - ) -> execution.Execution: + ) -> "execution.Execution": """Creates a new Metadata Execution. Args: @@ -115,7 +115,7 @@ def start_execution( project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, - ) -> execution.Execution: + ) -> "execution.Execution": """Create and starts a new Metadata Execution. Args: From 6311bfd34516e4ea0e3ae9efacfc699f92d2dc9a Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 23 Jun 2022 04:09:56 +0000 Subject: [PATCH 36/48] change base classes to abstract classes --- .../metadata/schema/base_artifact.py | 17 +-- .../metadata/schema/base_execution.py | 17 +-- .../metadata/schema/google/artifact_schema.py | 12 +-- .../metadata/schema/system/artifact_schema.py | 55 ++++++++-- .../schema/system/execution_schema.py | 9 +- tests/unit/aiplatform/test_metadata_schema.py | 101 +++++++++--------- 6 files changed, 125 insertions(+), 86 deletions(-) diff --git a/google/cloud/aiplatform/metadata/schema/base_artifact.py b/google/cloud/aiplatform/metadata/schema/base_artifact.py index 2f1eb1c0ff..35f3b05778 100644 --- a/google/cloud/aiplatform/metadata/schema/base_artifact.py +++ b/google/cloud/aiplatform/metadata/schema/base_artifact.py @@ -15,6 +15,8 @@ # limitations under the License. # +import abc + from typing import Optional, Dict from google.auth import credentials as auth_credentials @@ -24,14 +26,18 @@ from google.cloud.aiplatform.metadata import constants -class BaseArtifactSchema(object): +class BaseArtifactSchema(metaclass=abc.ABCMeta): """Base class for Metadata Artifact types.""" - SCHEMA_TITLE = "system.Artifact" + @property + @classmethod + @abc.abstractmethod + def schema_title(cls) -> str: + """Identifies the Vertex Metadta schema title used by the resource.""" + pass def __init__( self, - schema_title: Optional[str] = None, resource_id: Optional[str] = None, uri: Optional[str] = None, display_name: Optional[str] = None, @@ -50,8 +56,6 @@ def __init__( various structure and field requirements for the metadata field. Args: - schema_title (str): - Optional. The schema title used by the Artifact, defaults to "system.Artifact" resource_id (str): Optional. The portion of the Artifact name with the following format, this is globally unique in a metadataStore: @@ -76,9 +80,6 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ - self.schema_title = BaseArtifactSchema.SCHEMA_TITLE - if schema_title: - self.schema_title = schema_title self.resource_id = resource_id self.uri = uri self.display_name = display_name diff --git a/google/cloud/aiplatform/metadata/schema/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py index adb4a5c770..916d47078d 100644 --- a/google/cloud/aiplatform/metadata/schema/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -15,6 +15,8 @@ # limitations under the License. 
# +import abc + from typing import Optional, Dict from google.auth import credentials as auth_credentials @@ -25,14 +27,18 @@ from google.cloud.aiplatform.metadata import metadata -class BaseExecutionSchema(object): +class BaseExecutionSchema(metaclass=abc.ABCMeta): """Base class for Metadata Execution schema.""" - SCHEMA_TITLE = "system.ContainerExecution" + @property + @classmethod + @abc.abstractmethod + def schema_title(cls) -> str: + """Identifies the Vertex Metadta schema title used by the resource.""" + pass def __init__( self, - schema_title: Optional[str] = None, state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, resource_id: Optional[str] = None, display_name: Optional[str] = None, @@ -44,8 +50,6 @@ def __init__( """Initializes the Execution with the given name, URI and metadata. Args: - schema_title (str): - Required. schema_title identifies the schema title used by the Execution. state (gca_execution.Execution.State.RUNNING): Optional. State of this Execution. Defaults to RUNNING. resource_id (str): @@ -62,9 +66,6 @@ def __init__( description (str): Optional. Describes the purpose of the Execution to be created. """ - self.schema_title = BaseExecutionSchema.SCHEMA_TITLE - if schema_title: - self.schema_title = schema_title self.state = state self.resource_id = resource_id self.display_name = display_name diff --git a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py index 969839de22..8af9945fa8 100644 --- a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py @@ -24,7 +24,7 @@ class VertexDataset(base_artifact.BaseArtifactSchema): """An artifact representing a Vertex Dataset.""" - SCHEMA_TITLE = "google.VertexDataset" + schema_title = "google.VertexDataset" def __init__( self, @@ -64,7 +64,6 @@ def __init__( """ extended_metadata = metadata or {} super(VertexDataset, self).__init__( - schema_title=self.SCHEMA_TITLE, resource_id=dataset_id, uri=uri, display_name=display_name, @@ -78,7 +77,7 @@ def __init__( class VertexModel(base_artifact.BaseArtifactSchema): """An artifact representing a Vertex Model.""" - SCHEMA_TITLE = "google.VertexModel" + schema_title = "google.VertexModel" def __init__( self, @@ -119,7 +118,6 @@ def __init__( extended_metadata = metadata or {} super(VertexModel, self).__init__( - schema_title=self.SCHEMA_TITLE, resource_id=vertex_model_id, uri=uri, display_name=display_name, @@ -133,7 +131,7 @@ def __init__( class VertexEndpoint(base_artifact.BaseArtifactSchema): """An artifact representing a Vertex Endpoint.""" - SCHEMA_TITLE = "google.VertexEndpoint" + schema_title = "google.VertexEndpoint" def __init__( self, @@ -174,7 +172,6 @@ def __init__( extended_metadata = metadata or {} super(VertexEndpoint, self).__init__( - schema_title=self.SCHEMA_TITLE, resource_id=vertex_endpoint_id, uri=uri, display_name=display_name, @@ -188,7 +185,7 @@ def __init__( class UnmanagedContainerModel(base_artifact.BaseArtifactSchema): """An artifact representing a Vertex Unmanaged Container Model.""" - SCHEMA_TITLE = "google.UnmanagedContainerModel" + schema_title = "google.UnmanagedContainerModel" def __init__( self, @@ -236,7 +233,6 @@ def __init__( extended_metadata["containerSpec"] = container_spec.to_dict() super(UnmanagedContainerModel, self).__init__( - schema_title=self.SCHEMA_TITLE, resource_id=unmanaged_container_model_id, uri=uri, display_name=display_name, diff --git 
a/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py b/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py index 401634b6b5..929c38702c 100644 --- a/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py @@ -24,7 +24,7 @@ class Model(base_artifact.BaseArtifactSchema): """Artifact type for model.""" - SCHEMA_TITLE = "system.Model" + schema_title = "system.Model" def __init__( self, @@ -58,7 +58,52 @@ def __init__( """ extended_metadata = metadata or {} super(Model, self).__init__( - schema_title=self.SCHEMA_TITLE, + uri=uri, + display_name=display_name, + schema_version=schema_version, + description=description, + metadata=extended_metadata, + state=state, + ) + + +class Artifact(base_artifact.BaseArtifactSchema): + """A generic artifact.""" + + schema_title = "system.Artifact" + + def __init__( + self, + uri: Optional[str] = None, + display_name: Optional[str] = None, + schema_version: Optional[str] = None, + description: Optional[str] = None, + metadata: Optional[Dict] = None, + state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, + ): + """Args: + uri (str): + Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual + artifact file. + display_name (str): + Optional. The user-defined name of the base. + schema_version (str): + Optional. schema_version specifies the version used by the base. + If not set, defaults to use the latest version. + description (str): + Optional. Describes the purpose of the Artifact to be created. + metadata (Dict): + Optional. Contains the metadata information that will be stored in the Artifact. + state (google.cloud.gapic.types.Artifact.State): + Optional. The state of this Artifact. This is a + property of the Artifact, and does not imply or + capture any ongoing process. This property is + managed by clients (such as Vertex AI + Pipelines), and the system does not prescribe or + check the validity of state transitions. 
+ """ + extended_metadata = metadata or {} + super(Artifact, self).__init__( uri=uri, display_name=display_name, schema_version=schema_version, @@ -71,7 +116,7 @@ def __init__( class Dataset(base_artifact.BaseArtifactSchema): """An artifact representing a system Dataset.""" - SCHEMA_TITLE = "system.Dataset" + schema_title = "system.Dataset" def __init__( self, @@ -105,7 +150,6 @@ def __init__( """ extended_metadata = metadata or {} super(Dataset, self).__init__( - schema_title=self.SCHEMA_TITLE, uri=uri, display_name=display_name, schema_version=schema_version, @@ -118,7 +162,7 @@ def __init__( class Metrics(base_artifact.BaseArtifactSchema): """Artifact schema for scalar metrics.""" - SCHEMA_TITLE = "system.Metrics" + schema_title = "system.Metrics" def __init__( self, @@ -183,7 +227,6 @@ def __init__( extended_metadata["mean_squared_error"] = mean_squared_error super(Metrics, self).__init__( - schema_title=self.SCHEMA_TITLE, uri=uri, display_name=display_name, schema_version=schema_version, diff --git a/google/cloud/aiplatform/metadata/schema/system/execution_schema.py b/google/cloud/aiplatform/metadata/schema/system/execution_schema.py index 3efce269f5..9ef540c73f 100644 --- a/google/cloud/aiplatform/metadata/schema/system/execution_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system/execution_schema.py @@ -24,7 +24,7 @@ class ContainerExecution(base_execution.BaseExecutionSchema): """Execution schema for a container execution.""" - SCHEMA_TITLE = "system.ContainerExecution" + schema_title = "system.ContainerExecution" def __init__( self, @@ -54,7 +54,6 @@ def __init__( """ extended_metadata = metadata or {} super(ContainerExecution, self).__init__( - schema_title=self.SCHEMA_TITLE, resource_id=execution_id, state=state, display_name=display_name, @@ -67,7 +66,7 @@ def __init__( class CustomJobExecution(base_execution.BaseExecutionSchema): """Execution schema for a custom job execution.""" - SCHEMA_TITLE = "system.CustomJobExecution" + schema_title = "system.CustomJobExecution" def __init__( self, @@ -97,7 +96,6 @@ def __init__( """ extended_metadata = metadata or {} super(CustomJobExecution, self).__init__( - schema_title=self.SCHEMA_TITLE, resource_id=execution_id, state=state, display_name=display_name, @@ -110,7 +108,7 @@ def __init__( class Run(base_execution.BaseExecutionSchema): """Execution schema for root run execution.""" - SCHEMA_TITLE = "system.Run" + schema_title = "system.Run" def __init__( self, @@ -140,7 +138,6 @@ def __init__( """ extended_metadata = metadata or {} super(Run, self).__init__( - schema_title=self.SCHEMA_TITLE, resource_id=execution_id, state=state, display_name=display_name, diff --git a/tests/unit/aiplatform/test_metadata_schema.py b/tests/unit/aiplatform/test_metadata_schema.py index 6786dd3ab4..760d668ac6 100644 --- a/tests/unit/aiplatform/test_metadata_schema.py +++ b/tests/unit/aiplatform/test_metadata_schema.py @@ -16,10 +16,11 @@ # import json +import pytest + from importlib import reload from unittest import mock from unittest.mock import patch -import pytest from google.cloud import aiplatform from google.cloud.aiplatform import initializer @@ -114,35 +115,29 @@ def setup_method(self): def teardown_method(self): initializer.global_pool.shutdown(wait=True) - def test_base_class_overrides_default_schema_title(self): - artifact = base_artifact.BaseArtifactSchema(schema_title=_TEST_SCHEMA_TITLE) - assert artifact.schema_title == _TEST_SCHEMA_TITLE + def test_base_class_instatiated_uses_schema_title(self): + class 
TestArtifact(base_artifact.BaseArtifactSchema): + schema_title = _TEST_SCHEMA_TITLE - def test_base_class_overrides_default_state(self): - artifact = base_artifact.BaseArtifactSchema(state=_TEST_ARTIFACT_STATE) - assert artifact.state == _TEST_ARTIFACT_STATE - - def test_base_class_default_schema_title(self): - artifact = base_artifact.BaseArtifactSchema() - assert artifact.schema_title == "system.Artifact" + artifact = TestArtifact() + assert artifact.schema_title == _TEST_SCHEMA_TITLE - def test_base_class_default_state(self): - artifact = base_artifact.BaseArtifactSchema() - assert artifact.state == gca_artifact.Artifact.State.LIVE + def test_base_class_parameters_overrides_default_values(self): + class TestArtifact(base_artifact.BaseArtifactSchema): + schema_title = _TEST_SCHEMA_TITLE - def test_base_class_overrides_default_version(self): - artifact = base_artifact.BaseArtifactSchema(schema_version=_TEST_SCHEMA_VERSION) - assert artifact.schema_version == _TEST_SCHEMA_VERSION - - def test_base_class_init_remaining_parameters_are_assigned_correctly(self): - artifact = base_artifact.BaseArtifactSchema( + artifact = TestArtifact( + state=_TEST_ARTIFACT_STATE, + schema_version=_TEST_SCHEMA_VERSION, resource_id=_TEST_ARTIFACT_ID, - schema_title=_TEST_SCHEMA_TITLE, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) + assert artifact.state == _TEST_ARTIFACT_STATE + assert artifact.state == _TEST_ARTIFACT_STATE + assert artifact.schema_version == _TEST_SCHEMA_VERSION assert artifact.resource_id == _TEST_ARTIFACT_ID assert artifact.schema_title == _TEST_SCHEMA_TITLE assert artifact.uri == _TEST_URI @@ -150,11 +145,18 @@ def test_base_class_init_remaining_parameters_are_assigned_correctly(self): assert artifact.description == _TEST_DESCRIPTION assert artifact.metadata == _TEST_UPDATED_METADATA + def test_base_class_without_schema_title_raises_error(self): + with pytest.raises(TypeError): + base_artifact.BaseArtifactSchema() + @pytest.mark.usefixtures("create_artifact_mock") def test_create_is_called_with_default_parameters(self, create_artifact_mock): aiplatform.init(project=_TEST_PROJECT) - artifact = base_artifact.BaseArtifactSchema( - schema_title=_TEST_SCHEMA_TITLE, + + class TestArtifact(base_artifact.BaseArtifactSchema): + schema_title = _TEST_SCHEMA_TITLE + + artifact = TestArtifact( uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, description=_TEST_DESCRIPTION, @@ -184,50 +186,46 @@ def teardown_method(self): initializer.global_pool.shutdown(wait=True) def test_base_class_overrides_default_schema_title(self): - execution = base_execution.BaseExecutionSchema(schema_title=_TEST_SCHEMA_TITLE) - assert execution.schema_title == _TEST_SCHEMA_TITLE + class TestExecution(base_execution.BaseExecutionSchema): + schema_title = _TEST_SCHEMA_TITLE - def test_base_class_overrides_default_state(self): - execution = base_execution.BaseExecutionSchema(state=_TEST_EXECUTION_STATE) - assert execution.state == _TEST_EXECUTION_STATE - - def test_base_class_default_schema_title(self): - execution = base_execution.BaseExecutionSchema() - assert execution.schema_title == "system.ContainerExecution" + execution = TestExecution() + assert execution.schema_title == _TEST_SCHEMA_TITLE - def test_base_class_default_state(self): - execution = base_execution.BaseExecutionSchema() - assert execution.state == gca_execution.Execution.State.RUNNING + def test_base_class_parameters_overrides_default_values(self): + class 
TestExecution(base_execution.BaseExecutionSchema): + schema_title = _TEST_SCHEMA_TITLE - def test_base_class_overrides_default_version(self): - execution = base_execution.BaseExecutionSchema( - schema_version=_TEST_SCHEMA_VERSION - ) - assert execution.schema_version == _TEST_SCHEMA_VERSION - - def test_base_class_init_remaining_parameters_are_assigned_correctly(self): - execution = base_execution.BaseExecutionSchema( - resource_id=_TEST_EXECUTION_ID, - schema_title=_TEST_SCHEMA_TITLE, + execution = TestExecution( state=_TEST_EXECUTION_STATE, + schema_version=_TEST_SCHEMA_VERSION, + resource_id=_TEST_EXECUTION_ID, display_name=_TEST_DISPLAY_NAME, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) + assert execution.state == _TEST_EXECUTION_STATE + assert execution.schema_version == _TEST_SCHEMA_VERSION assert execution.resource_id == _TEST_EXECUTION_ID assert execution.schema_title == _TEST_SCHEMA_TITLE - assert execution.state == _TEST_EXECUTION_STATE assert execution.display_name == _TEST_DISPLAY_NAME assert execution.description == _TEST_DESCRIPTION assert execution.metadata == _TEST_UPDATED_METADATA + def test_base_class_without_schema_title_raises_error(self): + with pytest.raises(TypeError): + base_execution.BaseExecutionSchema() + @pytest.mark.usefixtures("create_execution_mock") def test_create_method_calls_gapic_library_with_correct_parameters( self, create_execution_mock ): aiplatform.init(project=_TEST_PROJECT) - execution = base_execution.BaseExecutionSchema( - schema_title=_TEST_SCHEMA_TITLE, + + class TestExecution(base_execution.BaseExecutionSchema): + schema_title = _TEST_SCHEMA_TITLE + + execution = TestExecution( state=_TEST_EXECUTION_STATE, display_name=_TEST_DISPLAY_NAME, description=_TEST_DESCRIPTION, @@ -249,8 +247,11 @@ def test_start_execution_method_calls_gapic_library_with_correct_parameters( self, create_execution_mock ): aiplatform.init(project=_TEST_PROJECT) - execution = base_execution.BaseExecutionSchema( - schema_title=_TEST_SCHEMA_TITLE, + + class TestExecution(base_execution.BaseExecutionSchema): + schema_title = _TEST_SCHEMA_TITLE + + execution = TestExecution( state=_TEST_EXECUTION_STATE, display_name=_TEST_DISPLAY_NAME, description=_TEST_DESCRIPTION, From 04c9e8838e926cb527faca6b9e1bb5e87b54144b Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 23 Jun 2022 04:17:44 +0000 Subject: [PATCH 37/48] Add tests for system.artifact type --- tests/unit/aiplatform/test_metadata_schema.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/tests/unit/aiplatform/test_metadata_schema.py b/tests/unit/aiplatform/test_metadata_schema.py index 760d668ac6..3f31c170b2 100644 --- a/tests/unit/aiplatform/test_metadata_schema.py +++ b/tests/unit/aiplatform/test_metadata_schema.py @@ -410,6 +410,24 @@ def test_system_dataset_constructor_parameters_are_set_correctly(self): assert artifact.metadata == _TEST_UPDATED_METADATA assert artifact.schema_version == _TEST_SCHEMA_VERSION + def test_system_artifact_schema_title_is_set_correctly(self): + artifact = system_artifact_schema.Artifact() + assert artifact.schema_title == "system.Artifact" + + def test_system_artifact_constructor_parameters_are_set_correctly(self): + artifact = system_artifact_schema.Artifact( + uri=_TEST_URI, + display_name=_TEST_DISPLAY_NAME, + schema_version=_TEST_SCHEMA_VERSION, + description=_TEST_DESCRIPTION, + metadata=_TEST_UPDATED_METADATA, + ) + assert artifact.uri == _TEST_URI + assert artifact.display_name == _TEST_DISPLAY_NAME + assert artifact.description == 
_TEST_DESCRIPTION + assert artifact.metadata == _TEST_UPDATED_METADATA + assert artifact.schema_version == _TEST_SCHEMA_VERSION + def test_system_model_schema_title_is_set_correctly(self): artifact = system_artifact_schema.Model() assert artifact.schema_title == "system.Model" From 1778a347b2c83ec1c03a9b43d6fb588e95c1ef6b Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 23 Jun 2022 07:59:46 +0000 Subject: [PATCH 38/48] use resouce name instead of id and populate metadata with resourceNanme accoridngly --- google/cloud/aiplatform/metadata/artifact.py | 2 +- google/cloud/aiplatform/metadata/execution.py | 2 +- .../metadata/schema/base_artifact.py | 4 +- .../metadata/schema/base_execution.py | 6 +- .../metadata/schema/google/artifact_schema.py | 81 +++++++++++++------ .../metadata/schema/system/artifact_schema.py | 33 ++++++-- .../schema/system/execution_schema.py | 15 ++-- tests/unit/aiplatform/test_metadata_schema.py | 56 ++++++++----- 8 files changed, 130 insertions(+), 69 deletions(-) diff --git a/google/cloud/aiplatform/metadata/artifact.py b/google/cloud/aiplatform/metadata/artifact.py index 975f7af933..7402a5034b 100644 --- a/google/cloud/aiplatform/metadata/artifact.py +++ b/google/cloud/aiplatform/metadata/artifact.py @@ -363,7 +363,7 @@ def create_from_base_artifact_schema( """ return cls._create( - resource_id=base_artifact_schema.resource_id, + resource_id=base_artifact_schema.artifact_id, schema_title=base_artifact_schema.schema_title, uri=base_artifact_schema.uri, display_name=base_artifact_schema.display_name, diff --git a/google/cloud/aiplatform/metadata/execution.py b/google/cloud/aiplatform/metadata/execution.py index bb5592bc22..db82472ae8 100644 --- a/google/cloud/aiplatform/metadata/execution.py +++ b/google/cloud/aiplatform/metadata/execution.py @@ -216,7 +216,7 @@ def create_from_base_execution_schema( metadata_store=metadata_store_id, ), schema_title=base_execution_schema.schema_title, - resource_id=base_execution_schema.resource_id, + resource_id=base_execution_schema.execution_id, metadata=base_execution_schema.metadata, description=base_execution_schema.description, display_name=base_execution_schema.display_name, diff --git a/google/cloud/aiplatform/metadata/schema/base_artifact.py b/google/cloud/aiplatform/metadata/schema/base_artifact.py index 35f3b05778..afde35c3fc 100644 --- a/google/cloud/aiplatform/metadata/schema/base_artifact.py +++ b/google/cloud/aiplatform/metadata/schema/base_artifact.py @@ -38,7 +38,7 @@ def schema_title(cls) -> str: def __init__( self, - resource_id: Optional[str] = None, + artifact_id: Optional[str] = None, uri: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -80,7 +80,7 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. 
""" - self.resource_id = resource_id + self.artifact_id = artifact_id self.uri = uri self.display_name = display_name self.schema_version = schema_version or constants._DEFAULT_SCHEMA_VERSION diff --git a/google/cloud/aiplatform/metadata/schema/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py index 916d47078d..cad7db318e 100644 --- a/google/cloud/aiplatform/metadata/schema/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -40,7 +40,7 @@ def schema_title(cls) -> str: def __init__( self, state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, - resource_id: Optional[str] = None, + execution_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, metadata: Optional[Dict] = None, @@ -52,7 +52,7 @@ def __init__( Args: state (gca_execution.Execution.State.RUNNING): Optional. State of this Execution. Defaults to RUNNING. - resource_id (str): + execution_id (str): Optional. The portion of the Execution name with the following format, this is globally unique in a metadataStore. projects/123/locations/us-central1/metadataStores//executions/. @@ -67,7 +67,7 @@ def __init__( Optional. Describes the purpose of the Execution to be created. """ self.state = state - self.resource_id = resource_id + self.execution_id = execution_id self.display_name = display_name self.schema_version = schema_version or constants._DEFAULT_SCHEMA_VERSION self.metadata = metadata diff --git a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py index 8af9945fa8..cc3ed67db1 100644 --- a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py @@ -20,6 +20,9 @@ from google.cloud.aiplatform.metadata.schema import base_artifact from google.cloud.aiplatform.metadata.schema import utils +# The artifact property key for the resource_name +_ARTIFACT_PROPERTY_KEY_RESOURCE_NAME = "resourceName" + class VertexDataset(base_artifact.BaseArtifactSchema): """An artifact representing a Vertex Dataset.""" @@ -28,8 +31,9 @@ class VertexDataset(base_artifact.BaseArtifactSchema): def __init__( self, - dataset_id: Optional[str] = None, - uri: Optional[str] = None, + dataset_name: str, + uri: str, + artifact_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, description: Optional[str] = None, @@ -37,14 +41,20 @@ def __init__( state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, ): """Args: - dataset_id (str): - Optional. The portion of the Artifact name, in a form of - projects/{project}/locations/{location}/datasets/{datasets_id}. For + dataset_name (str): + The name of the Dataset resource, in a form of + projects/{project}/locations/{location}/datasets/{dataset}. For more details, see https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.datasets/get uri (str): - Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual - artifact file. + The Vertex Dataset resource uri, in a form of + https://{service-endpoint}/v1/{dataset_name}, + where {service-endpoint} is one of the supported service endpoints at + https://cloud.google.com/vertex-ai/docs/reference/rest#rest_endpoints + artifact_id (str): + Optional. The portion of the Artifact name with + the format. 
This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. display_name (str): Optional. The user-defined name of the Artifact. schema_version (str): @@ -63,9 +73,11 @@ def __init__( check the validity of state transitions. """ extended_metadata = metadata or {} + extended_metadata[_ARTIFACT_PROPERTY_KEY_RESOURCE_NAME] = dataset_name + super(VertexDataset, self).__init__( - resource_id=dataset_id, uri=uri, + artifact_id=artifact_id, display_name=display_name, schema_version=schema_version, description=description, @@ -81,8 +93,9 @@ class VertexModel(base_artifact.BaseArtifactSchema): def __init__( self, - vertex_model_id: Optional[str] = None, - uri: Optional[str] = None, + vertex_model_name: str, + uri: str, + artifact_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, description: Optional[str] = None, @@ -90,14 +103,20 @@ def __init__( state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, ): """Args: - vertex_model_id (str): - Optional. The portion of the Artifact name, in a form of - projects/{project}/locations/{location}/models/{model_id}. For + vertex_model_name (str): + The name of the Model resource, in a form of + projects/{project}/locations/{location}/models/{model}. For more details, see https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.models/get uri (str): - Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual - artifact file. + The Vertex Model resource uri, in a form of + https://{service-endpoint}/v1/{vertex_model_name}, + where {service-endpoint} is one of the supported service endpoints at + https://cloud.google.com/vertex-ai/docs/reference/rest#rest_endpoints + artifact_id (str): + Optional. The portion of the Artifact name with + the format. This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. display_name (str): Optional. The user-defined name of the Artifact. schema_version (str): @@ -116,10 +135,11 @@ def __init__( check the validity of state transitions. """ extended_metadata = metadata or {} + extended_metadata[_ARTIFACT_PROPERTY_KEY_RESOURCE_NAME] = vertex_model_name super(VertexModel, self).__init__( - resource_id=vertex_model_id, uri=uri, + artifact_id=artifact_id, display_name=display_name, schema_version=schema_version, description=description, @@ -135,8 +155,9 @@ class VertexEndpoint(base_artifact.BaseArtifactSchema): def __init__( self, - vertex_endpoint_id: Optional[str] = None, - uri: Optional[str] = None, + vertex_endpoint_name: str, + uri: str, + artifact_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, description: Optional[str] = None, @@ -145,13 +166,19 @@ def __init__( ): """Args: vertex_endpoint_name (str): - Optional. The portion of the Artifact name, in a form of - projects/{project}/locations/{location}/endpoints/{endpoint_id}. For + The name of the Endpoint resource, in a form of + projects/{project}/locations/{location}/endpoints/{endpoint}. For more details, see https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.endpoints/get uri (str): - Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual - artifact file. 
+ The Vertex Endpoint resource uri, in a form of + https://{service-endpoint}/v1/{vertex_endpoint_name}, + where {service-endpoint} is one of the supported service endpoints at + https://cloud.google.com/vertex-ai/docs/reference/rest#rest_endpoints + artifact_id (str): + Optional. The portion of the Artifact name with + the format. This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. display_name (str): Optional. The user-defined name of the Artifact. schema_version (str): @@ -171,9 +198,11 @@ def __init__( """ extended_metadata = metadata or {} + extended_metadata[_ARTIFACT_PROPERTY_KEY_RESOURCE_NAME] = vertex_endpoint_name + super(VertexEndpoint, self).__init__( - resource_id=vertex_endpoint_id, uri=uri, + artifact_id=artifact_id, display_name=display_name, schema_version=schema_version, description=description, @@ -191,7 +220,7 @@ def __init__( self, predict_schema_ta: utils.PredictSchemata, container_spec: utils.ContainerSpec, - unmanaged_container_model_id: Optional[str] = None, + artifact_id: Optional[str] = None, uri: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -204,7 +233,7 @@ def __init__( An instance of PredictSchemata which holds instance, parameter and prediction schema uris. container_spec (ContainerSpec): An instance of ContainerSpec which holds the container configuration for the model. - unmanaged_container_model_id (str): + artifact_id (str): Optional. The portion of the Artifact name with the format. This is globally unique in a metadataStore: projects/123/locations/us-central1/metadataStores//artifacts/. @@ -233,8 +262,8 @@ def __init__( extended_metadata["containerSpec"] = container_spec.to_dict() super(UnmanagedContainerModel, self).__init__( - resource_id=unmanaged_container_model_id, uri=uri, + artifact_id=artifact_id, display_name=display_name, schema_version=schema_version, description=description, diff --git a/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py b/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py index 929c38702c..c94461bae3 100644 --- a/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py @@ -29,6 +29,7 @@ class Model(base_artifact.BaseArtifactSchema): def __init__( self, uri: Optional[str] = None, + artifact_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, description: Optional[str] = None, @@ -39,6 +40,10 @@ def __init__( uri (str): Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual artifact file. + artifact_id (str): + Optional. The portion of the Artifact name with + the format. This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. display_name (str): Optional. The user-defined name of the base. schema_version (str): @@ -56,13 +61,13 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. 
""" - extended_metadata = metadata or {} super(Model, self).__init__( uri=uri, + artifact_id=artifact_id, display_name=display_name, schema_version=schema_version, description=description, - metadata=extended_metadata, + metadata=metadata, state=state, ) @@ -75,6 +80,7 @@ class Artifact(base_artifact.BaseArtifactSchema): def __init__( self, uri: Optional[str] = None, + artifact_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, description: Optional[str] = None, @@ -85,6 +91,10 @@ def __init__( uri (str): Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual artifact file. + artifact_id (str): + Optional. The portion of the Artifact name with + the format. This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. display_name (str): Optional. The user-defined name of the base. schema_version (str): @@ -102,13 +112,13 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ - extended_metadata = metadata or {} super(Artifact, self).__init__( uri=uri, + artifact_id=artifact_id, display_name=display_name, schema_version=schema_version, description=description, - metadata=extended_metadata, + metadata=metadata, state=state, ) @@ -121,6 +131,7 @@ class Dataset(base_artifact.BaseArtifactSchema): def __init__( self, uri: Optional[str] = None, + artifact_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, description: Optional[str] = None, @@ -131,6 +142,10 @@ def __init__( uri (str): Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual artifact file. + artifact_id (str): + Optional. The portion of the Artifact name with + the format. This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. display_name (str): Optional. The user-defined name of the base. schema_version (str): @@ -148,13 +163,13 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ - extended_metadata = metadata or {} super(Dataset, self).__init__( uri=uri, + artifact_id=artifact_id, display_name=display_name, schema_version=schema_version, description=description, - metadata=extended_metadata, + metadata=metadata, state=state, ) @@ -173,6 +188,7 @@ def __init__( mean_absolute_error: Optional[float] = None, mean_squared_error: Optional[float] = None, uri: Optional[str] = None, + artifact_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, description: Optional[str] = None, @@ -195,6 +211,10 @@ def __init__( uri (str): Optional. The uniform resource identifier of the artifact file. May be empty if there is no actual artifact file. + artifact_id (str): + Optional. The portion of the Artifact name with + the format. This is globally unique in a metadataStore: + projects/123/locations/us-central1/metadataStores//artifacts/. display_name (str): Optional. The user-defined name of the base. 
schema_version (str): @@ -228,6 +248,7 @@ def __init__( super(Metrics, self).__init__( uri=uri, + artifact_id=artifact_id, display_name=display_name, schema_version=schema_version, description=description, diff --git a/google/cloud/aiplatform/metadata/schema/system/execution_schema.py b/google/cloud/aiplatform/metadata/schema/system/execution_schema.py index 9ef540c73f..eef556aad0 100644 --- a/google/cloud/aiplatform/metadata/schema/system/execution_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system/execution_schema.py @@ -52,14 +52,13 @@ def __init__( description (str): Optional. Describes the purpose of the Execution to be created. """ - extended_metadata = metadata or {} super(ContainerExecution, self).__init__( - resource_id=execution_id, + execution_id=execution_id, state=state, display_name=display_name, schema_version=schema_version, description=description, - metadata=extended_metadata, + metadata=metadata, ) @@ -94,14 +93,13 @@ def __init__( description (str): Optional. Describes the purpose of the Execution to be created. """ - extended_metadata = metadata or {} super(CustomJobExecution, self).__init__( - resource_id=execution_id, + execution_id=execution_id, state=state, display_name=display_name, schema_version=schema_version, description=description, - metadata=extended_metadata, + metadata=metadata, ) @@ -136,12 +134,11 @@ def __init__( description (str): Optional. Describes the purpose of the Execution to be created. """ - extended_metadata = metadata or {} super(Run, self).__init__( - resource_id=execution_id, + execution_id=execution_id, state=state, display_name=display_name, schema_version=schema_version, description=description, - metadata=extended_metadata, + metadata=metadata, ) diff --git a/tests/unit/aiplatform/test_metadata_schema.py b/tests/unit/aiplatform/test_metadata_schema.py index 3f31c170b2..8cccd88b0a 100644 --- a/tests/unit/aiplatform/test_metadata_schema.py +++ b/tests/unit/aiplatform/test_metadata_schema.py @@ -129,7 +129,7 @@ class TestArtifact(base_artifact.BaseArtifactSchema): artifact = TestArtifact( state=_TEST_ARTIFACT_STATE, schema_version=_TEST_SCHEMA_VERSION, - resource_id=_TEST_ARTIFACT_ID, + artifact_id=_TEST_ARTIFACT_ID, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, description=_TEST_DESCRIPTION, @@ -138,7 +138,7 @@ class TestArtifact(base_artifact.BaseArtifactSchema): assert artifact.state == _TEST_ARTIFACT_STATE assert artifact.state == _TEST_ARTIFACT_STATE assert artifact.schema_version == _TEST_SCHEMA_VERSION - assert artifact.resource_id == _TEST_ARTIFACT_ID + assert artifact.artifact_id == _TEST_ARTIFACT_ID assert artifact.schema_title == _TEST_SCHEMA_TITLE assert artifact.uri == _TEST_URI assert artifact.display_name == _TEST_DISPLAY_NAME @@ -199,14 +199,14 @@ class TestExecution(base_execution.BaseExecutionSchema): execution = TestExecution( state=_TEST_EXECUTION_STATE, schema_version=_TEST_SCHEMA_VERSION, - resource_id=_TEST_EXECUTION_ID, + execution_id=_TEST_EXECUTION_ID, display_name=_TEST_DISPLAY_NAME, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) assert execution.state == _TEST_EXECUTION_STATE assert execution.schema_version == _TEST_SCHEMA_VERSION - assert execution.resource_id == _TEST_EXECUTION_ID + assert execution.execution_id == _TEST_EXECUTION_ID assert execution.schema_title == _TEST_SCHEMA_TITLE assert execution.display_name == _TEST_DISPLAY_NAME assert execution.description == _TEST_DESCRIPTION @@ -279,63 +279,69 @@ def teardown_method(self): initializer.global_pool.shutdown(wait=True) 
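To make the intent of the test changes below concrete, here is a minimal usage sketch of the behavior this patch introduces. The project, location, and dataset IDs are illustrative only, and the keyword names match this patch (later commits in the series rename dataset_name to vertex_dataset_name and derive the uri from the resource name automatically):

from google.cloud.aiplatform.metadata.schema.google import (
    artifact_schema as google_artifact_schema,
)

# Illustrative Vertex Dataset resource name.
dataset_name = "projects/my-project/locations/us-central1/datasets/1234567890"

dataset_artifact = google_artifact_schema.VertexDataset(
    dataset_name=dataset_name,
    uri=f"https://us-central1-aiplatform.googleapis.com/v1/{dataset_name}",
    display_name="my-dataset-artifact",
)

# The schema class seeds the artifact metadata with the backing resource name,
# which is what the updated assertions below verify.
assert dataset_artifact.metadata["resourceName"] == dataset_name
assert dataset_artifact.schema_title == "google.VertexDataset"

# Calling dataset_artifact.create() would then register the artifact in the
# default metadata store of the project/location set via aiplatform.init().
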
def test_vertex_dataset_schema_title_is_set_correctly(self): - artifact = google_artifact_schema.VertexDataset() + artifact = google_artifact_schema.VertexDataset( + dataset_name=_TEST_ARTIFACT_NAME, + uri=_TEST_URI, + ) assert artifact.schema_title == "google.VertexDataset" def test_vertex_dataset_constructor_parameters_are_set_correctly(self): artifact = google_artifact_schema.VertexDataset( - dataset_id=_TEST_ARTIFACT_ID, + dataset_name=_TEST_ARTIFACT_NAME, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, description=_TEST_DESCRIPTION, - metadata=_TEST_UPDATED_METADATA, + metadata={}, ) - assert artifact.resource_id == _TEST_ARTIFACT_ID assert artifact.uri == _TEST_URI assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION - assert artifact.metadata == _TEST_UPDATED_METADATA + assert artifact.metadata == {"resourceName": _TEST_ARTIFACT_NAME} assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_vertex_model_schema_title_is_set_correctly(self): - artifact = google_artifact_schema.VertexModel() + artifact = google_artifact_schema.VertexModel( + vertex_model_name=_TEST_ARTIFACT_NAME, + uri=_TEST_URI, + ) assert artifact.schema_title == "google.VertexModel" def test_vertex_model_constructor_parameters_are_set_correctly(self): artifact = google_artifact_schema.VertexModel( - vertex_model_id=_TEST_ARTIFACT_ID, + vertex_model_name=_TEST_ARTIFACT_NAME, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, description=_TEST_DESCRIPTION, - metadata=_TEST_UPDATED_METADATA, + metadata={}, ) - assert artifact.resource_id == _TEST_ARTIFACT_ID assert artifact.uri == _TEST_URI assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION - assert artifact.metadata == _TEST_UPDATED_METADATA + assert artifact.metadata == {"resourceName": _TEST_ARTIFACT_NAME} assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_vertex_endpoint_schema_title_is_set_correctly(self): - artifact = google_artifact_schema.VertexEndpoint() + artifact = google_artifact_schema.VertexEndpoint( + vertex_endpoint_name=_TEST_ARTIFACT_NAME, + uri=_TEST_URI, + ) assert artifact.schema_title == "google.VertexEndpoint" def test_vertex_endpoint_constructor_parameters_are_set_correctly(self): artifact = google_artifact_schema.VertexEndpoint( - vertex_endpoint_id=_TEST_ARTIFACT_ID, + vertex_endpoint_name=_TEST_ARTIFACT_NAME, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, description=_TEST_DESCRIPTION, - metadata=_TEST_UPDATED_METADATA, + metadata={}, ) - assert artifact.resource_id == _TEST_ARTIFACT_ID assert artifact.uri == _TEST_URI assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION - assert artifact.metadata == _TEST_UPDATED_METADATA + assert artifact.metadata == {"resourceName": _TEST_ARTIFACT_NAME} assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_unmanaged_container_model_title_is_set_correctly(self): @@ -368,14 +374,14 @@ def test_unmanaged_container_model_constructor_parameters_are_set_correctly(self artifact = google_artifact_schema.UnmanagedContainerModel( predict_schema_ta=predict_schema_ta, container_spec=container_spec, - unmanaged_container_model_id=_TEST_ARTIFACT_ID, + artifact_id=_TEST_ARTIFACT_ID, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) - assert 
artifact.resource_id == _TEST_ARTIFACT_ID + assert artifact.artifact_id == _TEST_ARTIFACT_ID assert artifact.uri == _TEST_URI assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION @@ -399,12 +405,14 @@ def test_system_dataset_schema_title_is_set_correctly(self): def test_system_dataset_constructor_parameters_are_set_correctly(self): artifact = system_artifact_schema.Dataset( uri=_TEST_URI, + artifact_id=_TEST_ARTIFACT_ID, display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) assert artifact.uri == _TEST_URI + assert artifact.artifact_id == _TEST_ARTIFACT_ID assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION assert artifact.metadata == _TEST_UPDATED_METADATA @@ -417,12 +425,14 @@ def test_system_artifact_schema_title_is_set_correctly(self): def test_system_artifact_constructor_parameters_are_set_correctly(self): artifact = system_artifact_schema.Artifact( uri=_TEST_URI, + artifact_id=_TEST_ARTIFACT_ID, display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) assert artifact.uri == _TEST_URI + assert artifact.artifact_id == _TEST_ARTIFACT_ID assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION assert artifact.metadata == _TEST_UPDATED_METADATA @@ -435,12 +445,14 @@ def test_system_model_schema_title_is_set_correctly(self): def test_system_model_constructor_parameters_are_set_correctly(self): artifact = system_artifact_schema.Model( uri=_TEST_URI, + artifact_id=_TEST_ARTIFACT_ID, display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) assert artifact.uri == _TEST_URI + assert artifact.artifact_id == _TEST_ARTIFACT_ID assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION assert artifact.metadata == _TEST_UPDATED_METADATA @@ -462,6 +474,7 @@ def test_system_metrics_constructor_parameters_are_set_correctly(self): f1score=0.4, mean_absolute_error=0.5, mean_squared_error=0.6, + artifact_id=_TEST_ARTIFACT_ID, uri=_TEST_URI, display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, @@ -469,6 +482,7 @@ def test_system_metrics_constructor_parameters_are_set_correctly(self): metadata=_TEST_UPDATED_METADATA, ) assert artifact.uri == _TEST_URI + assert artifact.artifact_id == _TEST_ARTIFACT_ID assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION assert artifact.schema_version == _TEST_SCHEMA_VERSION From bee8e931abb0c5e9feb3a87861b99490832904ab Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 23 Jun 2022 16:48:10 +0000 Subject: [PATCH 39/48] remove start_execution from this pr and move to a separate PR --- google/cloud/aiplatform/metadata/metadata.py | 44 ++++++------------- .../metadata/schema/base_execution.py | 38 ---------------- .../aiplatform/test_e2e_metadata_schema.py | 17 ------- tests/unit/aiplatform/test_metadata_schema.py | 26 ----------- 4 files changed, 14 insertions(+), 111 deletions(-) diff --git a/google/cloud/aiplatform/metadata/metadata.py b/google/cloud/aiplatform/metadata/metadata.py index 30bf5e5f7a..1b533d5176 100644 --- a/google/cloud/aiplatform/metadata/metadata.py +++ b/google/cloud/aiplatform/metadata/metadata.py @@ -31,7 +31,6 @@ from google.cloud.aiplatform.metadata import execution from 
google.cloud.aiplatform.metadata import experiment_resources from google.cloud.aiplatform.metadata import experiment_run_resource -from google.cloud.aiplatform.metadata.schema import base_execution from google.cloud.aiplatform.tensorboard import tensorboard_resource _LOGGER = base.Logger(__name__) @@ -513,12 +512,10 @@ def start_execution( metadata: Optional[Dict[str, Any]] = None, schema_version: Optional[str] = None, description: Optional[str] = None, - metadata_store_id: Optional[str] = "default", resume: bool = False, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, - base_execution_schema: Optional["base_execution.BaseExecutionSchema"] = None, ) -> execution.Execution: """ Create and starts a new Metadata Execution or resumes a previously created Execution. @@ -568,9 +565,6 @@ def start_execution( credentials (auth_credentials.Credentials): Optional. Custom credentials used to create this Execution. Overrides credentials set in aiplatform.init. - base_execution_schema (BaseExecutionSchema): - Optional. An instance of the BaseExecutionSchema class that can be provided instead of providing schema specific parameters. It overrides - the values provided for schema_title, resource_id, state, display_name, schema_version, description, and metadata. Returns: Execution: Instantiated representation of the managed Metadata Execution. @@ -611,32 +605,22 @@ def start_execution( run_execution.update(state=gca_execution.Execution.State.RUNNING) else: - if base_execution_schema: - run_execution = execution.Execution.create_from_base_execution_schema( - base_execution_schema=base_execution_schema, - metadata_store_id=metadata_store_id, - project=project, - location=location, - credentials=credentials, + if not schema_title: + raise ValueError( + "schema_title must be provided when starting a new Execution" ) - else: - if not schema_title: - raise ValueError( - "schema_title or base_execution_schema must be provided when starting a new Execution" - ) - run_execution = execution.Execution.create( - display_name=display_name, - schema_title=schema_title, - schema_version=schema_version, - metadata=metadata, - description=description, - metadata_store_id=metadata_store_id, - resource_id=resource_id, - project=project, - location=location, - credentials=credentials, - ) + run_execution = execution.Execution.create( + display_name=display_name, + schema_title=schema_title, + schema_version=schema_version, + metadata=metadata, + description=description, + resource_id=resource_id, + project=project, + location=location, + credentials=credentials, + ) if self.experiment_run: if self.experiment_run._is_legacy_experiment_run(): diff --git a/google/cloud/aiplatform/metadata/schema/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py index cad7db318e..1c47855904 100644 --- a/google/cloud/aiplatform/metadata/schema/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -24,7 +24,6 @@ from google.cloud.aiplatform.compat.types import execution as gca_execution from google.cloud.aiplatform.metadata import constants from google.cloud.aiplatform.metadata import execution -from google.cloud.aiplatform.metadata import metadata class BaseExecutionSchema(metaclass=abc.ABCMeta): @@ -109,40 +108,3 @@ def create( credentials=credentials, ) return self.execution - - def start_execution( - self, - metadata_store_id: Optional[str] = "default", - project: Optional[str] = None, - location: Optional[str] 
= None, - credentials: Optional[auth_credentials.Credentials] = None, - ) -> "execution.Execution": - """Create and starts a new Metadata Execution. - - Args: - metadata_store_id (str): - Optional. The portion of the resource name with - the format: - projects/123/locations/us-central1/metadataStores//executions/ - If not provided, the MetadataStore's ID will be set to "default". - project (str): - Optional. Project used to create this Execution. Overrides project set in - aiplatform.init. - location (str): - Optional. Location used to create this Execution. Overrides location set in - aiplatform.init. - credentials (auth_credentials.Credentials): - Optional. Custom credentials used to create this Execution. Overrides - credentials set in aiplatform.init. - Returns: - Execution: Instantiated representation of the managed Metadata Execution. - - """ - return metadata._ExperimentTracker().start_execution( - base_execution_schema=self, - resume=False, - metadata_store_id=metadata_store_id, - project=project, - location=location, - credentials=credentials, - ) diff --git a/tests/system/aiplatform/test_e2e_metadata_schema.py b/tests/system/aiplatform/test_e2e_metadata_schema.py index 99925c68eb..70367bed44 100644 --- a/tests/system/aiplatform/test_e2e_metadata_schema.py +++ b/tests/system/aiplatform/test_e2e_metadata_schema.py @@ -143,20 +143,3 @@ def test_execution_create_using_system_schema_class(self): assert execution.schema_title == "system.CustomJobExecution" assert execution.description == self.execution_description assert "/metadataStores/default/executions/" in execution.resource_name - - def test_execution_start_execution_using_system_schema_class(self): - - aiplatform.init( - project=e2e_base._PROJECT, - location=e2e_base._LOCATION, - ) - - execution = system_execution_schema.ContainerExecution( - display_name=self.execution_display_name, - description=self.execution_description, - ).start_execution() - - assert execution.display_name == self.execution_display_name - assert execution.schema_title == "system.ContainerExecution" - assert execution.description == self.execution_description - assert "/metadataStores/default/executions/" in execution.resource_name diff --git a/tests/unit/aiplatform/test_metadata_schema.py b/tests/unit/aiplatform/test_metadata_schema.py index 8cccd88b0a..900869533b 100644 --- a/tests/unit/aiplatform/test_metadata_schema.py +++ b/tests/unit/aiplatform/test_metadata_schema.py @@ -242,32 +242,6 @@ class TestExecution(base_execution.BaseExecutionSchema): assert kwargs["execution"].description == _TEST_DESCRIPTION assert kwargs["execution"].metadata == _TEST_UPDATED_METADATA - @pytest.mark.usefixtures("create_execution_mock") - def test_start_execution_method_calls_gapic_library_with_correct_parameters( - self, create_execution_mock - ): - aiplatform.init(project=_TEST_PROJECT) - - class TestExecution(base_execution.BaseExecutionSchema): - schema_title = _TEST_SCHEMA_TITLE - - execution = TestExecution( - state=_TEST_EXECUTION_STATE, - display_name=_TEST_DISPLAY_NAME, - description=_TEST_DESCRIPTION, - metadata=_TEST_UPDATED_METADATA, - ) - execution.start_execution(metadata_store_id=_TEST_METADATA_STORE) - create_execution_mock.assert_called_once_with( - parent=_TEST_PARENT, execution=mock.ANY, execution_id=None - ) - _, _, kwargs = create_execution_mock.mock_calls[0] - assert kwargs["execution"].schema_title == _TEST_SCHEMA_TITLE - assert kwargs["execution"].state == _TEST_EXECUTION_STATE - assert kwargs["execution"].display_name == _TEST_DISPLAY_NAME - 
assert kwargs["execution"].description == _TEST_DESCRIPTION - assert kwargs["execution"].metadata == _TEST_UPDATED_METADATA - class TestMetadataGoogleArtifactSchema: def setup_method(self): From 5e87e0a37ee19fcc35bd7b4c33c4308a9d75281c Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 23 Jun 2022 16:59:37 +0000 Subject: [PATCH 40/48] change all args to keyword args --- google/cloud/aiplatform/metadata/execution.py | 2 +- google/cloud/aiplatform/metadata/schema/base_artifact.py | 2 ++ google/cloud/aiplatform/metadata/schema/base_execution.py | 2 ++ .../aiplatform/metadata/schema/google/artifact_schema.py | 4 ++++ .../aiplatform/metadata/schema/system/artifact_schema.py | 4 ++++ .../aiplatform/metadata/schema/system/execution_schema.py | 3 +++ 6 files changed, 16 insertions(+), 1 deletion(-) diff --git a/google/cloud/aiplatform/metadata/execution.py b/google/cloud/aiplatform/metadata/execution.py index db82472ae8..9a0c7d40c1 100644 --- a/google/cloud/aiplatform/metadata/execution.py +++ b/google/cloud/aiplatform/metadata/execution.py @@ -170,8 +170,8 @@ def create( @classmethod def create_from_base_execution_schema( cls, - base_execution_schema: "base_execution.BaseExecutionSchema", *, + base_execution_schema: "base_execution.BaseExecutionSchema", metadata_store_id: Optional[str] = "default", project: Optional[str] = None, location: Optional[str] = None, diff --git a/google/cloud/aiplatform/metadata/schema/base_artifact.py b/google/cloud/aiplatform/metadata/schema/base_artifact.py index afde35c3fc..1017534133 100644 --- a/google/cloud/aiplatform/metadata/schema/base_artifact.py +++ b/google/cloud/aiplatform/metadata/schema/base_artifact.py @@ -38,6 +38,7 @@ def schema_title(cls) -> str: def __init__( self, + *, artifact_id: Optional[str] = None, uri: Optional[str] = None, display_name: Optional[str] = None, @@ -90,6 +91,7 @@ def __init__( def create( self, + *, metadata_store_id: Optional[str] = "default", project: Optional[str] = None, location: Optional[str] = None, diff --git a/google/cloud/aiplatform/metadata/schema/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py index 1c47855904..170768d70e 100644 --- a/google/cloud/aiplatform/metadata/schema/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -38,6 +38,7 @@ def schema_title(cls) -> str: def __init__( self, + *, state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, execution_id: Optional[str] = None, display_name: Optional[str] = None, @@ -74,6 +75,7 @@ def __init__( def create( self, + *, metadata_store_id: Optional[str] = "default", project: Optional[str] = None, location: Optional[str] = None, diff --git a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py index cc3ed67db1..b8393734ca 100644 --- a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py @@ -31,6 +31,7 @@ class VertexDataset(base_artifact.BaseArtifactSchema): def __init__( self, + *, dataset_name: str, uri: str, artifact_id: Optional[str] = None, @@ -93,6 +94,7 @@ class VertexModel(base_artifact.BaseArtifactSchema): def __init__( self, + *, vertex_model_name: str, uri: str, artifact_id: Optional[str] = None, @@ -155,6 +157,7 @@ class VertexEndpoint(base_artifact.BaseArtifactSchema): def __init__( self, + *, vertex_endpoint_name: str, uri: str, artifact_id: Optional[str] = None, @@ -218,6 +221,7 @@ class 
UnmanagedContainerModel(base_artifact.BaseArtifactSchema): def __init__( self, + *, predict_schema_ta: utils.PredictSchemata, container_spec: utils.ContainerSpec, artifact_id: Optional[str] = None, diff --git a/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py b/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py index c94461bae3..3d8ed0404f 100644 --- a/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py @@ -28,6 +28,7 @@ class Model(base_artifact.BaseArtifactSchema): def __init__( self, + *, uri: Optional[str] = None, artifact_id: Optional[str] = None, display_name: Optional[str] = None, @@ -79,6 +80,7 @@ class Artifact(base_artifact.BaseArtifactSchema): def __init__( self, + *, uri: Optional[str] = None, artifact_id: Optional[str] = None, display_name: Optional[str] = None, @@ -130,6 +132,7 @@ class Dataset(base_artifact.BaseArtifactSchema): def __init__( self, + *, uri: Optional[str] = None, artifact_id: Optional[str] = None, display_name: Optional[str] = None, @@ -181,6 +184,7 @@ class Metrics(base_artifact.BaseArtifactSchema): def __init__( self, + *, accuracy: Optional[float] = None, precision: Optional[float] = None, recall: Optional[float] = None, diff --git a/google/cloud/aiplatform/metadata/schema/system/execution_schema.py b/google/cloud/aiplatform/metadata/schema/system/execution_schema.py index eef556aad0..7223c694ba 100644 --- a/google/cloud/aiplatform/metadata/schema/system/execution_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system/execution_schema.py @@ -28,6 +28,7 @@ class ContainerExecution(base_execution.BaseExecutionSchema): def __init__( self, + *, state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, execution_id: Optional[str] = None, display_name: Optional[str] = None, @@ -69,6 +70,7 @@ class CustomJobExecution(base_execution.BaseExecutionSchema): def __init__( self, + *, state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, execution_id: Optional[str] = None, display_name: Optional[str] = None, @@ -110,6 +112,7 @@ class Run(base_execution.BaseExecutionSchema): def __init__( self, + *, state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, execution_id: Optional[str] = None, display_name: Optional[str] = None, From 48f65dcf7206ba642fe62ab25425b42ba857a818 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 23 Jun 2022 17:12:11 +0000 Subject: [PATCH 41/48] always make a copy of metadata instead of pass by reference --- .../metadata/schema/google/artifact_schema.py | 9 ++++----- .../metadata/schema/system/artifact_schema.py | 11 +++++++---- .../metadata/schema/system/execution_schema.py | 9 ++++++--- tests/unit/aiplatform/test_metadata_schema.py | 14 +++++++++++++- 4 files changed, 30 insertions(+), 13 deletions(-) diff --git a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py index b8393734ca..49ae637bdf 100644 --- a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py @@ -73,7 +73,7 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. 
""" - extended_metadata = metadata or {} + extended_metadata = metadata.copy() if metadata else {} extended_metadata[_ARTIFACT_PROPERTY_KEY_RESOURCE_NAME] = dataset_name super(VertexDataset, self).__init__( @@ -136,7 +136,7 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ - extended_metadata = metadata or {} + extended_metadata = metadata.copy() if metadata else {} extended_metadata[_ARTIFACT_PROPERTY_KEY_RESOURCE_NAME] = vertex_model_name super(VertexModel, self).__init__( @@ -199,8 +199,7 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ - extended_metadata = metadata or {} - + extended_metadata = metadata.copy() if metadata else {} extended_metadata[_ARTIFACT_PROPERTY_KEY_RESOURCE_NAME] = vertex_endpoint_name super(VertexEndpoint, self).__init__( @@ -261,7 +260,7 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ - extended_metadata = metadata or {} + extended_metadata = metadata.copy() if metadata else {} extended_metadata["predictSchemata"] = predict_schema_ta.to_dict() extended_metadata["containerSpec"] = container_spec.to_dict() diff --git a/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py b/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py index 3d8ed0404f..f52801e14c 100644 --- a/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py @@ -62,13 +62,14 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ + extended_metadata = metadata.copy() if metadata else {} super(Model, self).__init__( uri=uri, artifact_id=artifact_id, display_name=display_name, schema_version=schema_version, description=description, - metadata=metadata, + metadata=extended_metadata, state=state, ) @@ -114,13 +115,14 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ + extended_metadata = metadata.copy() if metadata else {} super(Artifact, self).__init__( uri=uri, artifact_id=artifact_id, display_name=display_name, schema_version=schema_version, description=description, - metadata=metadata, + metadata=extended_metadata, state=state, ) @@ -166,13 +168,14 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ + extended_metadata = metadata.copy() if metadata else {} super(Dataset, self).__init__( uri=uri, artifact_id=artifact_id, display_name=display_name, schema_version=schema_version, description=description, - metadata=metadata, + metadata=extended_metadata, state=state, ) @@ -236,7 +239,7 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ - extended_metadata = metadata or {} + extended_metadata = metadata.copy() if metadata else {} if accuracy: extended_metadata["accuracy"] = accuracy if precision: diff --git a/google/cloud/aiplatform/metadata/schema/system/execution_schema.py b/google/cloud/aiplatform/metadata/schema/system/execution_schema.py index 7223c694ba..832bba7372 100644 --- a/google/cloud/aiplatform/metadata/schema/system/execution_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system/execution_schema.py @@ -53,13 +53,14 @@ def __init__( description (str): Optional. Describes the purpose of the Execution to be created. 
""" + extended_metadata = metadata.copy() if metadata else {} super(ContainerExecution, self).__init__( execution_id=execution_id, state=state, display_name=display_name, schema_version=schema_version, description=description, - metadata=metadata, + metadata=extended_metadata, ) @@ -95,13 +96,14 @@ def __init__( description (str): Optional. Describes the purpose of the Execution to be created. """ + extended_metadata = metadata.copy() if metadata else {} super(CustomJobExecution, self).__init__( execution_id=execution_id, state=state, display_name=display_name, schema_version=schema_version, description=description, - metadata=metadata, + metadata=extended_metadata, ) @@ -137,11 +139,12 @@ def __init__( description (str): Optional. Describes the purpose of the Execution to be created. """ + extended_metadata = metadata.copy() if metadata else {} super(Run, self).__init__( execution_id=execution_id, state=state, display_name=display_name, schema_version=schema_version, description=description, - metadata=metadata, + metadata=extended_metadata, ) diff --git a/tests/unit/aiplatform/test_metadata_schema.py b/tests/unit/aiplatform/test_metadata_schema.py index 900869533b..43f7fc7bcf 100644 --- a/tests/unit/aiplatform/test_metadata_schema.py +++ b/tests/unit/aiplatform/test_metadata_schema.py @@ -355,11 +355,23 @@ def test_unmanaged_container_model_constructor_parameters_are_set_correctly(self description=_TEST_DESCRIPTION, metadata=_TEST_UPDATED_METADATA, ) + expected_metadata = { + "test-param1": 2, + "test-param2": "test-value-1", + "test-param3": False, + "predictSchemata": { + "instanceSchemaUri": "instance_uri", + "parametersSchemaUri": "parameters_uri", + "predictionSchemaUri": "prediction_uri", + }, + "containerSpec": {"imageUri": "gcr.io/test_container_image_uri"}, + } + assert artifact.artifact_id == _TEST_ARTIFACT_ID assert artifact.uri == _TEST_URI assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION - assert artifact.metadata == _TEST_UPDATED_METADATA + assert json.dumps(artifact.metadata) == json.dumps(expected_metadata) assert artifact.schema_version == _TEST_SCHEMA_VERSION From 086df906e8ea522364a5c2d6e12a4a09168d0aef Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 23 Jun 2022 18:54:49 +0000 Subject: [PATCH 42/48] auto generate uri for google types --- .../metadata/schema/google/artifact_schema.py | 24 ++++----- .../cloud/aiplatform/metadata/schema/utils.py | 27 ++++++++++ tests/unit/aiplatform/test_metadata_schema.py | 51 ++++++++++++------- 3 files changed, 68 insertions(+), 34 deletions(-) diff --git a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py index 49ae637bdf..a053eb0cc6 100644 --- a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py @@ -32,8 +32,7 @@ class VertexDataset(base_artifact.BaseArtifactSchema): def __init__( self, *, - dataset_name: str, - uri: str, + vertex_dataset_name: str, artifact_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -42,13 +41,12 @@ def __init__( state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, ): """Args: - dataset_name (str): + vertex_dataset_name (str): The name of the Dataset resource, in a form of projects/{project}/locations/{location}/datasets/{dataset}. 
For more details, see https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.datasets/get - uri (str): - The Vertex Dataset resource uri, in a form of + This is used to generate the resource uri as follows: https://{service-endpoint}/v1/{dataset_name}, where {service-endpoint} is one of the supported service endpoints at https://cloud.google.com/vertex-ai/docs/reference/rest#rest_endpoints @@ -74,10 +72,10 @@ def __init__( check the validity of state transitions. """ extended_metadata = metadata.copy() if metadata else {} - extended_metadata[_ARTIFACT_PROPERTY_KEY_RESOURCE_NAME] = dataset_name + extended_metadata[_ARTIFACT_PROPERTY_KEY_RESOURCE_NAME] = vertex_dataset_name super(VertexDataset, self).__init__( - uri=uri, + uri=utils.create_uri_from_resource_name(resource_name=vertex_dataset_name), artifact_id=artifact_id, display_name=display_name, schema_version=schema_version, @@ -96,7 +94,6 @@ def __init__( self, *, vertex_model_name: str, - uri: str, artifact_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -110,8 +107,7 @@ def __init__( projects/{project}/locations/{location}/models/{model}. For more details, see https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.models/get - uri (str): - The Vertex Model resource uri, in a form of + This is used to generate the resource uri as follows: https://{service-endpoint}/v1/{vertex_model_name}, where {service-endpoint} is one of the supported service endpoints at https://cloud.google.com/vertex-ai/docs/reference/rest#rest_endpoints @@ -140,7 +136,7 @@ def __init__( extended_metadata[_ARTIFACT_PROPERTY_KEY_RESOURCE_NAME] = vertex_model_name super(VertexModel, self).__init__( - uri=uri, + uri=utils.create_uri_from_resource_name(resource_name=vertex_model_name), artifact_id=artifact_id, display_name=display_name, schema_version=schema_version, @@ -159,7 +155,6 @@ def __init__( self, *, vertex_endpoint_name: str, - uri: str, artifact_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -173,8 +168,7 @@ def __init__( projects/{project}/locations/{location}/endpoints/{endpoint}. For more details, see https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.endpoints/get - uri (str): - The Vertex Endpoint resource uri, in a form of + This is used to generate the resource uri as follows: https://{service-endpoint}/v1/{vertex_endpoint_name}, where {service-endpoint} is one of the supported service endpoints at https://cloud.google.com/vertex-ai/docs/reference/rest#rest_endpoints @@ -203,7 +197,7 @@ def __init__( extended_metadata[_ARTIFACT_PROPERTY_KEY_RESOURCE_NAME] = vertex_endpoint_name super(VertexEndpoint, self).__init__( - uri=uri, + uri=utils.create_uri_from_resource_name(resource_name=vertex_endpoint_name), artifact_id=artifact_id, display_name=display_name, schema_version=schema_version, diff --git a/google/cloud/aiplatform/metadata/schema/utils.py b/google/cloud/aiplatform/metadata/schema/utils.py index 1b71c509e5..bccbad7be8 100644 --- a/google/cloud/aiplatform/metadata/schema/utils.py +++ b/google/cloud/aiplatform/metadata/schema/utils.py @@ -14,6 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import re + from typing import Optional, Dict, List from dataclasses import dataclass @@ -139,3 +141,28 @@ def to_dict(self): results["healthRoute"] = self.health_route return results + + +def create_uri_from_resource_name(resource_name: str) -> bool: + """Construct the service URI for a given resource_name. + Args: + resource_name (str): + The name of the Vertex resource, in a form of + projects/{project}/locations/{location}/{resource_type}/{resource_id} + Returns: + The resource URI in the form of: + https://{service-endpoint}/v1/{resource_name}, + where {service-endpoint} is one of the supported service endpoints at + https://cloud.google.com/vertex-ai/docs/reference/rest#rest_endpoints + Raises: + ValueError: If resource_name does not match the specified format. + """ + match_results = re.match( + r"^projects\/[A-Za-z0-9-]*\/locations\/([A-Za-z0-9-]*)\/[A-Za-z0-9-]*\/[A-Za-z0-9-]*$", + resource_name, + ) + if not match_results: + raise ValueError(f"Invalid resource_name format for {resource_name}.") + + location = match_results.group(1) + return f"https://{location}-aiplatform.googleapis.com/v1/{resource_name}" diff --git a/tests/unit/aiplatform/test_metadata_schema.py b/tests/unit/aiplatform/test_metadata_schema.py index 43f7fc7bcf..cbf7d38609 100644 --- a/tests/unit/aiplatform/test_metadata_schema.py +++ b/tests/unit/aiplatform/test_metadata_schema.py @@ -48,7 +48,7 @@ _TEST_PROJECT = "test-project" _TEST_LOCATION = "us-central1" _TEST_METADATA_STORE = "test-metadata-store" -_TEST_PARENT = f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}/metadataStores/{_TEST_METADATA_STORE}" +_TEST_PARENT = f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}" # resource attributes _TEST_ARTIFACT_STATE = gca_artifact.Artifact.State.STATE_UNSPECIFIED @@ -165,7 +165,9 @@ class TestArtifact(base_artifact.BaseArtifactSchema): ) artifact.create(metadata_store_id=_TEST_METADATA_STORE) create_artifact_mock.assert_called_once_with( - parent=_TEST_PARENT, artifact=mock.ANY, artifact_id=None + parent=f"{_TEST_PARENT}/metadataStores/{_TEST_METADATA_STORE}", + artifact=mock.ANY, + artifact_id=None, ) _, _, kwargs = create_artifact_mock.mock_calls[0] assert kwargs["artifact"].schema_title == _TEST_SCHEMA_TITLE @@ -233,7 +235,9 @@ class TestExecution(base_execution.BaseExecutionSchema): ) execution.create(metadata_store_id=_TEST_METADATA_STORE) create_execution_mock.assert_called_once_with( - parent=_TEST_PARENT, execution=mock.ANY, execution_id=None + parent=f"{_TEST_PARENT}/metadataStores/{_TEST_METADATA_STORE}", + execution=mock.ANY, + execution_id=None, ) _, _, kwargs = create_execution_mock.mock_calls[0] assert kwargs["execution"].schema_title == _TEST_SCHEMA_TITLE @@ -254,68 +258,77 @@ def teardown_method(self): def test_vertex_dataset_schema_title_is_set_correctly(self): artifact = google_artifact_schema.VertexDataset( - dataset_name=_TEST_ARTIFACT_NAME, - uri=_TEST_URI, + vertex_dataset_name=_TEST_ARTIFACT_NAME, ) assert artifact.schema_title == "google.VertexDataset" def test_vertex_dataset_constructor_parameters_are_set_correctly(self): artifact = google_artifact_schema.VertexDataset( - dataset_name=_TEST_ARTIFACT_NAME, - uri=_TEST_URI, + vertex_dataset_name=f"{_TEST_PARENT}/datasets/dataset-id", display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, description=_TEST_DESCRIPTION, metadata={}, ) - assert artifact.uri == _TEST_URI + assert ( + artifact.uri + == "https://us-central1-aiplatform.googleapis.com/v1/projects/test-project/locations/us-central1/datasets/dataset-id" + ) 
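The expected uri in this assertion comes from the new create_uri_from_resource_name helper added to schema/utils.py in the diff above. A small illustrative sketch of that helper follows; the resource name is a made-up example, and names that do not match the projects/{project}/locations/{location}/{resource_type}/{resource_id} pattern raise ValueError rather than producing a bogus uri:

from google.cloud.aiplatform.metadata.schema import utils

# Illustrative resource name in the canonical Vertex format.
name = "projects/test-project/locations/us-central1/datasets/dataset-id"

uri = utils.create_uri_from_resource_name(resource_name=name)
# uri == "https://us-central1-aiplatform.googleapis.com/v1/projects/test-project/locations/us-central1/datasets/dataset-id"
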
assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION - assert artifact.metadata == {"resourceName": _TEST_ARTIFACT_NAME} + assert artifact.metadata == { + "resourceName": "projects/test-project/locations/us-central1/datasets/dataset-id" + } assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_vertex_model_schema_title_is_set_correctly(self): artifact = google_artifact_schema.VertexModel( vertex_model_name=_TEST_ARTIFACT_NAME, - uri=_TEST_URI, ) assert artifact.schema_title == "google.VertexModel" def test_vertex_model_constructor_parameters_are_set_correctly(self): artifact = google_artifact_schema.VertexModel( - vertex_model_name=_TEST_ARTIFACT_NAME, - uri=_TEST_URI, + vertex_model_name=f"{_TEST_PARENT}/models/model-id", display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, description=_TEST_DESCRIPTION, metadata={}, ) - assert artifact.uri == _TEST_URI + assert ( + artifact.uri + == "https://us-central1-aiplatform.googleapis.com/v1/projects/test-project/locations/us-central1/models/model-id" + ) assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION - assert artifact.metadata == {"resourceName": _TEST_ARTIFACT_NAME} + assert artifact.metadata == { + "resourceName": "projects/test-project/locations/us-central1/models/model-id" + } assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_vertex_endpoint_schema_title_is_set_correctly(self): artifact = google_artifact_schema.VertexEndpoint( vertex_endpoint_name=_TEST_ARTIFACT_NAME, - uri=_TEST_URI, ) assert artifact.schema_title == "google.VertexEndpoint" def test_vertex_endpoint_constructor_parameters_are_set_correctly(self): artifact = google_artifact_schema.VertexEndpoint( - vertex_endpoint_name=_TEST_ARTIFACT_NAME, - uri=_TEST_URI, + vertex_endpoint_name=f"{_TEST_PARENT}/endpoints/endpoint-id", display_name=_TEST_DISPLAY_NAME, schema_version=_TEST_SCHEMA_VERSION, description=_TEST_DESCRIPTION, metadata={}, ) - assert artifact.uri == _TEST_URI + assert ( + artifact.uri + == "https://us-central1-aiplatform.googleapis.com/v1/projects/test-project/locations/us-central1/endpoints/endpoint-id" + ) assert artifact.display_name == _TEST_DISPLAY_NAME assert artifact.description == _TEST_DESCRIPTION - assert artifact.metadata == {"resourceName": _TEST_ARTIFACT_NAME} + assert artifact.metadata == { + "resourceName": "projects/test-project/locations/us-central1/endpoints/endpoint-id" + } assert artifact.schema_version == _TEST_SCHEMA_VERSION def test_unmanaged_container_model_title_is_set_correctly(self): From af765f3ccb27003f857c89efce04a71dcfdcd8cb Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Thu, 23 Jun 2022 21:16:40 +0000 Subject: [PATCH 43/48] fix e2e tests --- .../aiplatform/test_e2e_metadata_schema.py | 57 +++++-------------- 1 file changed, 13 insertions(+), 44 deletions(-) diff --git a/tests/system/aiplatform/test_e2e_metadata_schema.py b/tests/system/aiplatform/test_e2e_metadata_schema.py index 70367bed44..238e32606b 100644 --- a/tests/system/aiplatform/test_e2e_metadata_schema.py +++ b/tests/system/aiplatform/test_e2e_metadata_schema.py @@ -19,8 +19,6 @@ import pytest from google.cloud import aiplatform -from google.cloud.aiplatform.metadata.schema import base_artifact -from google.cloud.aiplatform.metadata.schema import base_execution from google.cloud.aiplatform.metadata.schema.google import ( artifact_schema as google_artifact_schema, ) @@ -48,26 +46,6 @@ def setup_class(cls): cls.execution_display_name = 
cls._make_display_name("base-execution")[:30] cls.execution_description = cls._make_display_name("base-description") - def test_artifact_creation_using_schema_base_class(self): - - aiplatform.init( - project=e2e_base._PROJECT, - location=e2e_base._LOCATION, - ) - - artifact = base_artifact.BaseArtifactSchema( - display_name=self.artifact_display_name, - uri=self.artifact_uri, - metadata=self.artifact_metadata, - description=self.artifact_description, - ).create() - - assert artifact.display_name == self.artifact_display_name - assert json.dumps(artifact.metadata) == json.dumps(self.artifact_metadata) - assert artifact.schema_title == "system.Artifact" - assert artifact.description == self.artifact_description - assert "/metadataStores/default/artifacts/" in artifact.resource_name - def test_system_dataset_artifact_create(self): aiplatform.init( @@ -83,7 +61,9 @@ def test_system_dataset_artifact_create(self): ).create() assert artifact.display_name == self.artifact_display_name - assert json.dumps(artifact.metadata) == json.dumps(self.artifact_metadata) + assert json.dumps(artifact.metadata, sort_keys=True) == json.dumps( + self.artifact_metadata, sort_keys=True + ) assert artifact.schema_title == "system.Dataset" assert artifact.description == self.artifact_description assert "/metadataStores/default/artifacts/" in artifact.resource_name @@ -94,39 +74,28 @@ def test_google_dataset_artifact_create(self): project=e2e_base._PROJECT, location=e2e_base._LOCATION, ) - + vertex_dataset_name = f"projects/{e2e_base._PROJECT}/locations/{e2e_base._LOCATION}/datasets/dataset" artifact = google_artifact_schema.VertexDataset( - dataset_name=self.artifact_id, + vertex_dataset_name=vertex_dataset_name, display_name=self.artifact_display_name, - uri=self.artifact_uri, metadata=self.artifact_metadata, description=self.artifact_description, ).create() - expected_metadata = self.artifact_metadata + expected_metadata = self.artifact_metadata.copy() + expected_metadata["resourceName"] = vertex_dataset_name assert artifact.display_name == self.artifact_display_name - assert json.dumps(artifact.metadata) == json.dumps(expected_metadata) + assert json.dumps(artifact.metadata, sort_keys=True) == json.dumps( + expected_metadata, sort_keys=True + ) assert artifact.schema_title == "google.VertexDataset" assert artifact.description == self.artifact_description assert "/metadataStores/default/artifacts/" in artifact.resource_name - - def test_execution_create_using_schema_base_class(self): - - aiplatform.init( - project=e2e_base._PROJECT, - location=e2e_base._LOCATION, + assert ( + artifact.uri + == f"https://{e2e_base._LOCATION}-aiplatform.googleapis.com/v1/{vertex_dataset_name}" ) - execution = base_execution.BaseExecutionSchema( - display_name=self.execution_display_name, - description=self.execution_description, - ).create() - - assert execution.display_name == self.execution_display_name - assert execution.schema_title == "system.ContainerExecution" - assert execution.description == self.execution_description - assert "/metadataStores/default/executions/" in execution.resource_name - def test_execution_create_using_system_schema_class(self): aiplatform.init( From 63a7e5700592936349ac78f46d5b97c22ae9ef6d Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Sat, 25 Jun 2022 16:25:38 +0000 Subject: [PATCH 44/48] switch to using Artifact.create instead of _create --- google/cloud/aiplatform/metadata/artifact.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/aiplatform/metadata/artifact.py 
b/google/cloud/aiplatform/metadata/artifact.py index d99df6de4e..65ee2cb92b 100644 --- a/google/cloud/aiplatform/metadata/artifact.py +++ b/google/cloud/aiplatform/metadata/artifact.py @@ -362,7 +362,7 @@ def create_from_base_artifact_schema( Artifact: Instantiated representation of the managed Metadata Artifact. """ - return cls._create( + return cls.create( resource_id=base_artifact_schema.artifact_id, schema_title=base_artifact_schema.schema_title, uri=base_artifact_schema.uri, From 246dfcc9eee57e416eb4eb62c82b949bab5b8ee9 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Mon, 27 Jun 2022 21:58:18 +0000 Subject: [PATCH 45/48] change typing for state to Optional --- .../aiplatform/metadata/schema/base_execution.py | 4 +++- .../metadata/schema/google/artifact_schema.py | 8 ++++---- .../metadata/schema/system/artifact_schema.py | 8 ++++---- .../metadata/schema/system/execution_schema.py | 12 +++++++++--- 4 files changed, 20 insertions(+), 12 deletions(-) diff --git a/google/cloud/aiplatform/metadata/schema/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py index 170768d70e..811b7d9791 100644 --- a/google/cloud/aiplatform/metadata/schema/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -39,7 +39,9 @@ def schema_title(cls) -> str: def __init__( self, *, - state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, + state: Optional[ + gca_execution.Execution.State + ] = gca_execution.Execution.State.RUNNING, execution_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, diff --git a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py index a053eb0cc6..2a62c98244 100644 --- a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py @@ -38,7 +38,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, - state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, + state: Optional[gca_artifact.Artifact.State] = gca_artifact.Artifact.State.LIVE, ): """Args: vertex_dataset_name (str): @@ -99,7 +99,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, - state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, + state: Optional[gca_artifact.Artifact.State] = gca_artifact.Artifact.State.LIVE, ): """Args: vertex_model_name (str): @@ -160,7 +160,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, - state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, + state: Optional[gca_artifact.Artifact.State] = gca_artifact.Artifact.State.LIVE, ): """Args: vertex_endpoint_name (str): @@ -223,7 +223,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, - state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, + state: Optional[gca_artifact.Artifact.State] = gca_artifact.Artifact.State.LIVE, ): """Args: predict_schema_ta (PredictSchemata): diff --git a/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py b/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py index f52801e14c..ce91f2ee53 100644 --- a/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py +++ 
b/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py @@ -35,7 +35,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, - state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, + state: Optional[gca_artifact.Artifact.State] = gca_artifact.Artifact.State.LIVE, ): """Args: uri (str): @@ -88,7 +88,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, - state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, + state: Optional[gca_artifact.Artifact.State] = gca_artifact.Artifact.State.LIVE, ): """Args: uri (str): @@ -141,7 +141,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, - state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, + state: Optional[gca_artifact.Artifact.State] = gca_artifact.Artifact.State.LIVE, ): """Args: uri (str): @@ -200,7 +200,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, metadata: Optional[Dict] = None, - state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, + state: Optional[gca_artifact.Artifact.State] = gca_artifact.Artifact.State.LIVE, ): """Args: accuracy (float): diff --git a/google/cloud/aiplatform/metadata/schema/system/execution_schema.py b/google/cloud/aiplatform/metadata/schema/system/execution_schema.py index 832bba7372..9564134606 100644 --- a/google/cloud/aiplatform/metadata/schema/system/execution_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system/execution_schema.py @@ -29,7 +29,9 @@ class ContainerExecution(base_execution.BaseExecutionSchema): def __init__( self, *, - state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, + state: Optional[ + gca_execution.Execution.State + ] = gca_execution.Execution.State.RUNNING, execution_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -72,7 +74,9 @@ class CustomJobExecution(base_execution.BaseExecutionSchema): def __init__( self, *, - state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, + state: Optional[ + gca_execution.Execution.State + ] = gca_execution.Execution.State.RUNNING, execution_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, @@ -115,7 +119,9 @@ class Run(base_execution.BaseExecutionSchema): def __init__( self, *, - state: gca_execution.Execution.State = gca_execution.Execution.State.RUNNING, + state: Optional[ + gca_execution.Execution.State + ] = gca_execution.Execution.State.RUNNING, execution_id: Optional[str] = None, display_name: Optional[str] = None, schema_version: Optional[str] = None, From 575531e301de6db70c97d574a191ab5408ac0255 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Mon, 27 Jun 2022 22:03:58 +0000 Subject: [PATCH 46/48] change typing for state to Optional in artifact base file --- google/cloud/aiplatform/metadata/schema/base_artifact.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/aiplatform/metadata/schema/base_artifact.py b/google/cloud/aiplatform/metadata/schema/base_artifact.py index 1017534133..c89d989edd 100644 --- a/google/cloud/aiplatform/metadata/schema/base_artifact.py +++ b/google/cloud/aiplatform/metadata/schema/base_artifact.py @@ -45,7 +45,7 @@ def __init__( schema_version: Optional[str] = None, description: Optional[str] = None, 
metadata: Optional[Dict] = None, - state: gca_artifact.Artifact.State = gca_artifact.Artifact.State.LIVE, + state: Optional[gca_artifact.Artifact.State] = gca_artifact.Artifact.State.LIVE, ): """Initializes the Artifact with the given name, URI and metadata. From 3b17ce77f9951085a80f53ca6feaefb5c7e3beb8 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Mon, 27 Jun 2022 23:20:17 +0000 Subject: [PATCH 47/48] change to use the Execution.create instead of the private method --- google/cloud/aiplatform/metadata/execution.py | 26 +++++++------------ 1 file changed, 9 insertions(+), 17 deletions(-) diff --git a/google/cloud/aiplatform/metadata/execution.py b/google/cloud/aiplatform/metadata/execution.py index 9a0c7d40c1..895417fc64 100644 --- a/google/cloud/aiplatform/metadata/execution.py +++ b/google/cloud/aiplatform/metadata/execution.py @@ -203,28 +203,20 @@ def create_from_base_execution_schema( Execution: Instantiated representation of the managed Metadata Execution. """ - self = cls._empty_constructor( - project=project, location=location, credentials=credentials - ) - super(base.VertexAiResourceNounWithFutureManager, self).__init__() - - resource = Execution._create_resource( - client=self.api_client, - parent=metadata_store._MetadataStore._format_resource_name( - project=self.project, - location=self.location, - metadata_store=metadata_store_id, - ), + resource = Execution.create( + state=base_execution_schema.state, schema_title=base_execution_schema.schema_title, resource_id=base_execution_schema.execution_id, - metadata=base_execution_schema.metadata, - description=base_execution_schema.description, display_name=base_execution_schema.display_name, schema_version=base_execution_schema.schema_version, - state=base_execution_schema.state, + metadata=base_execution_schema.metadata, + description=base_execution_schema.description, + metadata_store_id=metadata_store_id, + project=project, + location=location, + credentials=credentials, ) - self._gca_resource = resource - return self + return resource def __enter__(self): if self.state is not gca_execution.Execution.State.RUNNING: From 323075e1d953c4bd44bf644f4fa9b633ccc50b84 Mon Sep 17 00:00:00 2001 From: sina chavoshi Date: Tue, 28 Jun 2022 18:38:43 +0000 Subject: [PATCH 48/48] chagne copy to deepcopy for metadata --- .../aiplatform/metadata/schema/google/artifact_schema.py | 9 +++++---- .../aiplatform/metadata/schema/system/artifact_schema.py | 9 +++++---- .../metadata/schema/system/execution_schema.py | 7 ++++--- google/cloud/aiplatform/metadata/schema/utils.py | 1 + 4 files changed, 15 insertions(+), 11 deletions(-) diff --git a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py index 2a62c98244..99e0fb0ba6 100644 --- a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py @@ -14,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import copy from typing import Optional, Dict from google.cloud.aiplatform.compat.types import artifact as gca_artifact @@ -71,7 +72,7 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. 
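
# ---------------------------------------------------------------------------
# NOTE (editorial aside, not part of the patch): a minimal sketch of why the
# hunks below replace `metadata.copy()` with `copy.deepcopy(metadata)`.
# `dict.copy()` is shallow, so nested dictionaries stay shared with the
# caller, and in-place edits to the copied structure can leak back into the
# caller's metadata. The dictionary contents here are invented purely for
# illustration.
import copy

caller_metadata = {"containerSpec": {"imageUri": "gcr.io/example/image:v1"}}

shallow = caller_metadata.copy()
shallow["containerSpec"]["imageUri"] = "gcr.io/example/image:v2"
# The caller's nested dict was mutated through the shared reference.
print(caller_metadata["containerSpec"]["imageUri"])  # gcr.io/example/image:v2

caller_metadata = {"containerSpec": {"imageUri": "gcr.io/example/image:v1"}}
deep = copy.deepcopy(caller_metadata)
deep["containerSpec"]["imageUri"] = "gcr.io/example/image:v2"
# With a deep copy the caller's metadata is left untouched.
print(caller_metadata["containerSpec"]["imageUri"])  # gcr.io/example/image:v1
# ---------------------------------------------------------------------------
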
""" - extended_metadata = metadata.copy() if metadata else {} + extended_metadata = copy.deepcopy(metadata) if metadata else {} extended_metadata[_ARTIFACT_PROPERTY_KEY_RESOURCE_NAME] = vertex_dataset_name super(VertexDataset, self).__init__( @@ -132,7 +133,7 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ - extended_metadata = metadata.copy() if metadata else {} + extended_metadata = copy.deepcopy(metadata) if metadata else {} extended_metadata[_ARTIFACT_PROPERTY_KEY_RESOURCE_NAME] = vertex_model_name super(VertexModel, self).__init__( @@ -193,7 +194,7 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ - extended_metadata = metadata.copy() if metadata else {} + extended_metadata = copy.deepcopy(metadata) if metadata else {} extended_metadata[_ARTIFACT_PROPERTY_KEY_RESOURCE_NAME] = vertex_endpoint_name super(VertexEndpoint, self).__init__( @@ -254,7 +255,7 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ - extended_metadata = metadata.copy() if metadata else {} + extended_metadata = copy.deepcopy(metadata) if metadata else {} extended_metadata["predictSchemata"] = predict_schema_ta.to_dict() extended_metadata["containerSpec"] = container_spec.to_dict() diff --git a/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py b/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py index ce91f2ee53..f3491a5573 100644 --- a/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system/artifact_schema.py @@ -15,6 +15,7 @@ # limitations under the License. # +import copy from typing import Optional, Dict from google.cloud.aiplatform.compat.types import artifact as gca_artifact @@ -62,7 +63,7 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ - extended_metadata = metadata.copy() if metadata else {} + extended_metadata = copy.deepcopy(metadata) if metadata else {} super(Model, self).__init__( uri=uri, artifact_id=artifact_id, @@ -115,7 +116,7 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ - extended_metadata = metadata.copy() if metadata else {} + extended_metadata = copy.deepcopy(metadata) if metadata else {} super(Artifact, self).__init__( uri=uri, artifact_id=artifact_id, @@ -168,7 +169,7 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ - extended_metadata = metadata.copy() if metadata else {} + extended_metadata = copy.deepcopy(metadata) if metadata else {} super(Dataset, self).__init__( uri=uri, artifact_id=artifact_id, @@ -239,7 +240,7 @@ def __init__( Pipelines), and the system does not prescribe or check the validity of state transitions. """ - extended_metadata = metadata.copy() if metadata else {} + extended_metadata = copy.deepcopy(metadata) if metadata else {} if accuracy: extended_metadata["accuracy"] = accuracy if precision: diff --git a/google/cloud/aiplatform/metadata/schema/system/execution_schema.py b/google/cloud/aiplatform/metadata/schema/system/execution_schema.py index 9564134606..68c96902cb 100644 --- a/google/cloud/aiplatform/metadata/schema/system/execution_schema.py +++ b/google/cloud/aiplatform/metadata/schema/system/execution_schema.py @@ -15,6 +15,7 @@ # limitations under the License. 
# +import copy from typing import Optional, Dict from google.cloud.aiplatform.compat.types import execution as gca_execution @@ -55,7 +56,7 @@ def __init__( description (str): Optional. Describes the purpose of the Execution to be created. """ - extended_metadata = metadata.copy() if metadata else {} + extended_metadata = copy.deepcopy(metadata) if metadata else {} super(ContainerExecution, self).__init__( execution_id=execution_id, state=state, @@ -100,7 +101,7 @@ def __init__( description (str): Optional. Describes the purpose of the Execution to be created. """ - extended_metadata = metadata.copy() if metadata else {} + extended_metadata = copy.deepcopy(metadata) if metadata else {} super(CustomJobExecution, self).__init__( execution_id=execution_id, state=state, @@ -145,7 +146,7 @@ def __init__( description (str): Optional. Describes the purpose of the Execution to be created. """ - extended_metadata = metadata.copy() if metadata else {} + extended_metadata = copy.deepcopy(metadata) if metadata else {} super(Run, self).__init__( execution_id=execution_id, state=state, diff --git a/google/cloud/aiplatform/metadata/schema/utils.py b/google/cloud/aiplatform/metadata/schema/utils.py index bccbad7be8..72577d9324 100644 --- a/google/cloud/aiplatform/metadata/schema/utils.py +++ b/google/cloud/aiplatform/metadata/schema/utils.py @@ -157,6 +157,7 @@ def create_uri_from_resource_name(resource_name: str) -> bool: Raises: ValueError: If resource_name does not match the specified format. """ + # TODO: support nested resource names such as models/123/evaluations/456 match_results = re.match( r"^projects\/[A-Za-z0-9-]*\/locations\/([A-Za-z0-9-]*)\/[A-Za-z0-9-]*\/[A-Za-z0-9-]*$", resource_name,
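
# ---------------------------------------------------------------------------
# NOTE (editorial aside, not part of the patch): a quick sketch of what the
# pattern above accepts, and why the TODO about nested resource names matters.
# This exercises only the regular expression visible in the hunk; the sample
# resource names are invented for illustration.
import re

_PATTERN = r"^projects\/[A-Za-z0-9-]*\/locations\/([A-Za-z0-9-]*)\/[A-Za-z0-9-]*\/[A-Za-z0-9-]*$"

# A flat resource name matches, and group(1) captures the location segment.
flat = "projects/123/locations/us-central1/datasets/456"
match = re.match(_PATTERN, flat)
print(match.group(1) if match else None)  # us-central1

# A nested resource name (the TODO case) has extra path segments, so the
# pattern rejects it; per the docstring above, the helper raises ValueError
# for names it cannot parse.
nested = "projects/123/locations/us-central1/models/123/evaluations/456"
print(re.match(_PATTERN, nested))  # None
# ---------------------------------------------------------------------------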