feat: add model_version_id to BatchPredictionJob in aiplatform v1 batch_prediction_job.proto (#1453)

* feat: add BatchImportModelEvaluationSlices API in aiplatform v1beta1 model_service.proto

PiperOrigin-RevId: 456559576

Source-Link: googleapis/googleapis@f5cd509

Source-Link: googleapis/googleapis-gen@b3549d7
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjM1NDlkN2E2MjExNmQ1ZGJmNDllMjJmYjNkNGM3NTlkNWRhODMxMyJ9

* feat: add model_version_id to BatchPredictionJob in aiplatform v1 batch_prediction_job.proto

feat: add model_version_id to DeployedModel in aiplatform v1 endpoint.proto

feat: add model_version_id to PredictResponse in aiplatform v1 prediction_service.proto

PiperOrigin-RevId: 456571762

Source-Link: googleapis/googleapis@19bd392

Source-Link: googleapis/googleapis-gen@d4dfc80
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZDRkZmM4MDJhODhmZTc3YmMwNDE5MWY0YjFkOWI1OTQ4MWU2OTJmZCJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
gcf-owl-bot[bot] and gcf-owl-bot[bot] authored Jun 22, 2022
1 parent d35df58 commit 9ef057a
Showing 22 changed files with 985 additions and 50 deletions.
18 changes: 9 additions & 9 deletions google/cloud/aiplatform_v1/services/migration_service/client.py
@@ -196,23 +196,18 @@ def parse_annotated_dataset_path(path: str) -> Dict[str, str]:
@staticmethod
def dataset_path(
project: str,
location: str,
dataset: str,
) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
return "projects/{project}/datasets/{dataset}".format(
project=project,
location=location,
dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
path,
)
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
return m.groupdict() if m else {}

@staticmethod
@@ -240,18 +235,23 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
@staticmethod
def dataset_path(
project: str,
location: str,
dataset: str,
) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/datasets/{dataset}".format(
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
project=project,
location=location,
dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
path,
)
return m.groupdict() if m else {}

@staticmethod
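A side effect worth noting: the two `dataset_path` helpers in this file share one name inside the class body, so Python keeps whichever definition comes last, and after this change the location-qualified form is the one the v1 `MigrationServiceClient` actually exposes. A minimal sketch, not part of the commit, with placeholder project, location, and dataset values:

```python
# Illustrative only; resource values are placeholders.
from google.cloud import aiplatform_v1

client_cls = aiplatform_v1.MigrationServiceClient

# The later definition wins, so dataset_path now builds the
# location-qualified resource name.
path = client_cls.dataset_path(
    project="my-project", location="us-central1", dataset="456"
)
print(path)
# projects/my-project/locations/us-central1/datasets/456

print(client_cls.parse_dataset_path(path))
# {'project': 'my-project', 'location': 'us-central1', 'dataset': '456'}
```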
7 changes: 7 additions & 0 deletions google/cloud/aiplatform_v1/types/batch_prediction_job.py
@@ -66,6 +66,9 @@ class BatchPredictionJob(proto.Message):
The model resource name may contain version id or version
alias to specify the version, if no version is specified,
the default version will be used.
model_version_id (str):
Output only. The version ID of the Model that
produces the predictions via this job.
unmanaged_container_model (google.cloud.aiplatform_v1.types.UnmanagedContainerModel):
Contains model information necessary to perform batch
prediction without requiring uploading to model registry.
@@ -405,6 +408,10 @@ class OutputInfo(proto.Message):
proto.STRING,
number=3,
)
model_version_id = proto.Field(
proto.STRING,
number=30,
)
unmanaged_container_model = proto.Field(
proto.MESSAGE,
number=28,
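A hedged sketch of how the new output-only field surfaces to a caller once this ships; the job resource name is a placeholder:

```python
# Sketch only; assumes an existing batch prediction job (name is a placeholder).
from google.cloud import aiplatform_v1

job_client = aiplatform_v1.JobServiceClient()
job = job_client.get_batch_prediction_job(
    name="projects/my-project/locations/us-central1/batchPredictionJobs/789"
)
# model_version_id (proto field 30) is the new output-only field added here.
print(job.model, job.model_version_id)
```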
7 changes: 7 additions & 0 deletions google/cloud/aiplatform_v1/types/endpoint.py
@@ -229,6 +229,9 @@ class DeployedModel(proto.Message):
version alias to specify the version, if no
version is specified, the default version will
be deployed.
model_version_id (str):
Output only. The version ID of the model that
is deployed.
display_name (str):
The display name of the DeployedModel. If not provided upon
creation, the Model's display_name is used.
@@ -311,6 +314,10 @@ class DeployedModel(proto.Message):
proto.STRING,
number=2,
)
model_version_id = proto.Field(
proto.STRING,
number=18,
)
display_name = proto.Field(
proto.STRING,
number=3,
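For the `DeployedModel` field, a minimal sketch (endpoint name is a placeholder) of inspecting which model version each deployment on an endpoint is serving:

```python
# Sketch only; the endpoint name is a placeholder.
from google.cloud import aiplatform_v1

endpoint_client = aiplatform_v1.EndpointServiceClient()
endpoint = endpoint_client.get_endpoint(
    name="projects/my-project/locations/us-central1/endpoints/123"
)
for deployed in endpoint.deployed_models:
    # model_version_id (proto field 18) reports the version of the model
    # that this DeployedModel is serving.
    print(deployed.id, deployed.model, deployed.model_version_id)
```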
8 changes: 8 additions & 0 deletions google/cloud/aiplatform_v1/types/prediction_service.py
@@ -96,6 +96,10 @@ class PredictResponse(proto.Message):
Output only. The resource name of the Model
which is deployed as the DeployedModel that this
prediction hits.
model_version_id (str):
Output only. The version ID of the Model
which is deployed as the DeployedModel that this
prediction hits.
model_display_name (str):
Output only. The [display
name][google.cloud.aiplatform.v1.Model.display_name] of the
@@ -116,6 +120,10 @@
proto.STRING,
number=3,
)
model_version_id = proto.Field(
proto.STRING,
number=5,
)
model_display_name = proto.Field(
proto.STRING,
number=4,
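On the prediction path, the new field lands on `PredictResponse`. A hedged sketch, with a placeholder endpoint name and instance payload:

```python
# Sketch only; endpoint name and instance payload are placeholders.
from google.cloud import aiplatform_v1
from google.protobuf import struct_pb2

prediction_client = aiplatform_v1.PredictionServiceClient()
instance = struct_pb2.Value(string_value="example payload")
response = prediction_client.predict(
    endpoint="projects/my-project/locations/us-central1/endpoints/123",
    instances=[instance],
)
# Alongside response.model, the response now also reports the exact
# model version that served the request.
print(response.model, response.model_version_id, response.model_display_name)
```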
4 changes: 4 additions & 0 deletions google/cloud/aiplatform_v1beta1/__init__.py
@@ -348,6 +348,8 @@
from .types.model_monitoring import ModelMonitoringObjectiveConfig
from .types.model_monitoring import SamplingStrategy
from .types.model_monitoring import ThresholdConfig
from .types.model_service import BatchImportModelEvaluationSlicesRequest
from .types.model_service import BatchImportModelEvaluationSlicesResponse
from .types.model_service import DeleteModelRequest
from .types.model_service import DeleteModelVersionRequest
from .types.model_service import ExportModelOperationMetadata
@@ -541,6 +543,8 @@
"BatchCreateTensorboardTimeSeriesRequest",
"BatchCreateTensorboardTimeSeriesResponse",
"BatchDedicatedResources",
"BatchImportModelEvaluationSlicesRequest",
"BatchImportModelEvaluationSlicesResponse",
"BatchMigrateResourcesOperationMetadata",
"BatchMigrateResourcesRequest",
"BatchMigrateResourcesResponse",
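With the re-exports above in place, the new request type is importable straight from the versioned package. A small hedged sketch (the parent resource name is a placeholder):

```python
# Sketch only: constructing the newly exported request type.
from google.cloud import aiplatform_v1beta1

request = aiplatform_v1beta1.BatchImportModelEvaluationSlicesRequest(
    parent="projects/my-project/locations/us-central1/models/123/evaluations/456",
)
print(type(request).__name__)
```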
10 changes: 10 additions & 0 deletions google/cloud/aiplatform_v1beta1/gapic_metadata.json
@@ -1266,6 +1266,11 @@
"grpc": {
"libraryClient": "ModelServiceClient",
"rpcs": {
"BatchImportModelEvaluationSlices": {
"methods": [
"batch_import_model_evaluation_slices"
]
},
"DeleteModel": {
"methods": [
"delete_model"
@@ -1346,6 +1351,11 @@
"grpc-async": {
"libraryClient": "ModelServiceAsyncClient",
"rpcs": {
"BatchImportModelEvaluationSlices": {
"methods": [
"batch_import_model_evaluation_slices"
]
},
"DeleteModel": {
"methods": [
"delete_model"
@@ -196,40 +196,40 @@ def parse_annotated_dataset_path(path: str) -> Dict[str, str]:
@staticmethod
def dataset_path(
project: str,
location: str,
dataset: str,
) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/datasets/{dataset}".format(
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
project=project,
location=location,
dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
path,
)
return m.groupdict() if m else {}

@staticmethod
def dataset_path(
project: str,
location: str,
dataset: str,
) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
return "projects/{project}/datasets/{dataset}".format(
project=project,
location=location,
dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
path,
)
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
return m.groupdict() if m else {}

@staticmethod
111 changes: 111 additions & 0 deletions google/cloud/aiplatform_v1beta1/services/model_service/async_client.py
@@ -1539,6 +1539,117 @@ async def sample_import_model_evaluation():
# Done; return the response.
return response

async def batch_import_model_evaluation_slices(
self,
request: Union[
model_service.BatchImportModelEvaluationSlicesRequest, dict
] = None,
*,
parent: str = None,
model_evaluation_slices: Sequence[
model_evaluation_slice.ModelEvaluationSlice
] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> model_service.BatchImportModelEvaluationSlicesResponse:
r"""Imports a list of externally generated
ModelEvaluationSlice.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
async def sample_batch_import_model_evaluation_slices():
# Create a client
client = aiplatform_v1beta1.ModelServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.BatchImportModelEvaluationSlicesRequest(
parent="parent_value",
)
# Make the request
response = await client.batch_import_model_evaluation_slices(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.BatchImportModelEvaluationSlicesRequest, dict]):
The request object. Request message for
[ModelService.BatchImportModelEvaluationSlices][google.cloud.aiplatform.v1beta1.ModelService.BatchImportModelEvaluationSlices]
parent (:class:`str`):
Required. The name of the parent ModelEvaluation
resource. Format:
``projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
model_evaluation_slices (:class:`Sequence[google.cloud.aiplatform_v1beta1.types.ModelEvaluationSlice]`):
Required. Model evaluation slice
resource to be imported.
This corresponds to the ``model_evaluation_slices`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.types.BatchImportModelEvaluationSlicesResponse:
Response message for
[ModelService.BatchImportModelEvaluationSlices][google.cloud.aiplatform.v1beta1.ModelService.BatchImportModelEvaluationSlices]
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, model_evaluation_slices])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)

request = model_service.BatchImportModelEvaluationSlicesRequest(request)

# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if model_evaluation_slices:
request.model_evaluation_slices.extend(model_evaluation_slices)

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.batch_import_model_evaluation_slices,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)

# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)

# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)

# Done; return the response.
return response

async def get_model_evaluation(
self,
request: Union[model_service.GetModelEvaluationRequest, dict] = None,
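The docstring sample above builds a bare request object; the new method also accepts the flattened `parent` and `model_evaluation_slices` arguments. A hedged sketch, with placeholder resource names and an empty slice payload:

```python
# Sketch only; resource names and slice contents are placeholders.
import asyncio

from google.cloud import aiplatform_v1beta1


async def main():
    client = aiplatform_v1beta1.ModelServiceAsyncClient()
    slice_ = aiplatform_v1beta1.ModelEvaluationSlice()  # populate as needed
    response = await client.batch_import_model_evaluation_slices(
        parent="projects/my-project/locations/us-central1/models/123/evaluations/456",
        model_evaluation_slices=[slice_],
    )
    print(response)


asyncio.run(main())
```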