diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py index 69f9839ce8..b96692805c 100644 --- a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py @@ -183,16 +183,19 @@ def parse_annotated_dataset_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def dataset_path(project: str, dataset: str,) -> str: + def dataset_path(project: str, location: str, dataset: str,) -> str: """Returns a fully-qualified dataset string.""" - return "projects/{project}/datasets/{dataset}".format( - project=project, dataset=dataset, + return "projects/{project}/locations/{location}/datasets/{dataset}".format( + project=project, location=location, dataset=dataset, ) @staticmethod def parse_dataset_path(path: str) -> Dict[str, str]: """Parses a dataset path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path) + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod @@ -212,19 +215,16 @@ def parse_dataset_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def dataset_path(project: str, location: str, dataset: str,) -> str: + def dataset_path(project: str, dataset: str,) -> str: """Returns a fully-qualified dataset string.""" - return "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, location=location, dataset=dataset, + return "projects/{project}/datasets/{dataset}".format( + project=project, dataset=dataset, ) @staticmethod def parse_dataset_path(path: str) -> Dict[str, str]: """Parses a dataset path into its component segments.""" - m = re.match( - r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$", - path, - ) + m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path) 
return m.groupdict() if m else {} @staticmethod diff --git a/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py b/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py index 7872cfcd14..b62d0bc8ca 100644 --- a/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py +++ b/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py @@ -95,6 +95,16 @@ class BatchPredictionJob(proto.Message): DEDICATED_RESOURCES this config may be provided (and the job will use these resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config must be provided. + service_account (str): + The service account that the DeployedModel's container runs + as. If not specified, a system generated one will be used, + which has minimal permissions and the custom container, if + used, may not have enough permission to access other GCP + resources. + + Users deploying the Model must have the + ``iam.serviceAccounts.actAs`` permission on this service + account. manual_batch_tuning_parameters (google.cloud.aiplatform_v1beta1.types.ManualBatchTuningParameters): Immutable. Parameters configuring the batch behavior. 
Currently only applicable when @@ -381,6 +391,7 @@ class OutputInfo(proto.Message): dedicated_resources = proto.Field( proto.MESSAGE, number=7, message=machine_resources.BatchDedicatedResources, ) + service_account = proto.Field(proto.STRING, number=29,) manual_batch_tuning_parameters = proto.Field( proto.MESSAGE, number=8, diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py index e782616a7c..edfdcb1e51 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py @@ -4494,6 +4494,7 @@ def test_create_batch_prediction_job(request_type, transport: str = "grpc"): name="name_value", display_name="display_name_value", model="model_value", + service_account="service_account_value", generate_explanation=True, state=job_state.JobState.JOB_STATE_QUEUED, ) @@ -4509,6 +4510,7 @@ def test_create_batch_prediction_job(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.model == "model_value" + assert response.service_account == "service_account_value" assert response.generate_explanation is True assert response.state == job_state.JobState.JOB_STATE_QUEUED @@ -4553,6 +4555,7 @@ async def test_create_batch_prediction_job_async( name="name_value", display_name="display_name_value", model="model_value", + service_account="service_account_value", generate_explanation=True, state=job_state.JobState.JOB_STATE_QUEUED, ) @@ -4569,6 +4572,7 @@ async def test_create_batch_prediction_job_async( assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.model == "model_value" + assert response.service_account == "service_account_value" assert response.generate_explanation is True assert response.state == job_state.JobState.JOB_STATE_QUEUED @@ -4750,6 +4754,7 @@ def test_get_batch_prediction_job(request_type, 
transport: str = "grpc"): name="name_value", display_name="display_name_value", model="model_value", + service_account="service_account_value", generate_explanation=True, state=job_state.JobState.JOB_STATE_QUEUED, ) @@ -4765,6 +4770,7 @@ def test_get_batch_prediction_job(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.model == "model_value" + assert response.service_account == "service_account_value" assert response.generate_explanation is True assert response.state == job_state.JobState.JOB_STATE_QUEUED @@ -4809,6 +4815,7 @@ async def test_get_batch_prediction_job_async( name="name_value", display_name="display_name_value", model="model_value", + service_account="service_account_value", generate_explanation=True, state=job_state.JobState.JOB_STATE_QUEUED, ) @@ -4825,6 +4832,7 @@ async def test_get_batch_prediction_job_async( assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.model == "model_value" + assert response.service_account == "service_account_value" assert response.generate_explanation is True assert response.state == job_state.JobState.JOB_STATE_QUEUED diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py index d98d95606c..a18e833705 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py @@ -1795,18 +1795,20 @@ def test_parse_annotated_dataset_path(): def test_dataset_path(): project = "cuttlefish" - dataset = "mussel" - expected = "projects/{project}/datasets/{dataset}".format( - project=project, dataset=dataset, + location = "mussel" + dataset = "winkle" + expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( + project=project, location=location, dataset=dataset, ) - actual = 
MigrationServiceClient.dataset_path(project, dataset) + actual = MigrationServiceClient.dataset_path(project, location, dataset) assert expected == actual def test_parse_dataset_path(): expected = { - "project": "winkle", - "dataset": "nautilus", + "project": "nautilus", + "location": "scallop", + "dataset": "abalone", } path = MigrationServiceClient.dataset_path(**expected) @@ -1816,9 +1818,9 @@ def test_parse_dataset_path(): def test_dataset_path(): - project = "scallop" - location = "abalone" - dataset = "squid" + project = "squid" + location = "clam" + dataset = "whelk" expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( project=project, location=location, dataset=dataset, ) @@ -1828,9 +1830,9 @@ def test_dataset_path(): def test_parse_dataset_path(): expected = { - "project": "clam", - "location": "whelk", - "dataset": "octopus", + "project": "octopus", + "location": "oyster", + "dataset": "nudibranch", } path = MigrationServiceClient.dataset_path(**expected) @@ -1840,20 +1842,18 @@ def test_parse_dataset_path(): def test_dataset_path(): - project = "oyster" - location = "nudibranch" - dataset = "cuttlefish" - expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, location=location, dataset=dataset, + project = "cuttlefish" + dataset = "mussel" + expected = "projects/{project}/datasets/{dataset}".format( + project=project, dataset=dataset, ) - actual = MigrationServiceClient.dataset_path(project, location, dataset) + actual = MigrationServiceClient.dataset_path(project, dataset) assert expected == actual def test_parse_dataset_path(): expected = { - "project": "mussel", - "location": "winkle", + "project": "winkle", "dataset": "nautilus", } path = MigrationServiceClient.dataset_path(**expected)