diff --git a/docs/aiplatform_v1beta1/deployment_resource_pool_service.rst b/docs/aiplatform_v1beta1/deployment_resource_pool_service.rst new file mode 100644 index 0000000000..5d07a6c151 --- /dev/null +++ b/docs/aiplatform_v1beta1/deployment_resource_pool_service.rst @@ -0,0 +1,10 @@ +DeploymentResourcePoolService +----------------------------------------------- + +.. automodule:: google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service + :members: + :inherited-members: + +.. automodule:: google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service.pagers + :members: + :inherited-members: diff --git a/docs/aiplatform_v1beta1/services.rst b/docs/aiplatform_v1beta1/services.rst index 490112c7d9..4d4f000af1 100644 --- a/docs/aiplatform_v1beta1/services.rst +++ b/docs/aiplatform_v1beta1/services.rst @@ -4,6 +4,7 @@ Services for Google Cloud Aiplatform v1beta1 API :maxdepth: 2 dataset_service + deployment_resource_pool_service endpoint_service featurestore_online_serving_service featurestore_service diff --git a/google/cloud/aiplatform_v1/services/dataset_service/async_client.py b/google/cloud/aiplatform_v1/services/dataset_service/async_client.py index f05a186b24..5462e20e0c 100644 --- a/google/cloud/aiplatform_v1/services/dataset_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/async_client.py @@ -745,9 +745,6 @@ async def sample_delete_dataset(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/dataset_service/client.py b/google/cloud/aiplatform_v1/services/dataset_service/client.py index 07f6ff77dd..026c1d3f70 100644 --- a/google/cloud/aiplatform_v1/services/dataset_service/client.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/client.py @@ -1058,9 +1058,6 @@ def sample_delete_dataset(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/endpoint_service/async_client.py b/google/cloud/aiplatform_v1/services/endpoint_service/async_client.py index 87a4c32dee..74f2c2618e 100644 --- a/google/cloud/aiplatform_v1/services/endpoint_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/endpoint_service/async_client.py @@ -748,9 +748,6 @@ async def sample_delete_endpoint(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/endpoint_service/client.py b/google/cloud/aiplatform_v1/services/endpoint_service/client.py index f436aeee75..34984818da 100644 --- a/google/cloud/aiplatform_v1/services/endpoint_service/client.py +++ b/google/cloud/aiplatform_v1/services/endpoint_service/client.py @@ -1026,9 +1026,6 @@ def sample_delete_endpoint(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/featurestore_service/async_client.py b/google/cloud/aiplatform_v1/services/featurestore_service/async_client.py index c2f7102c03..7fda241104 100644 --- a/google/cloud/aiplatform_v1/services/featurestore_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/featurestore_service/async_client.py @@ -793,9 +793,6 @@ async def sample_delete_featurestore(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1403,9 +1400,6 @@ async def sample_delete_entity_type(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2132,9 +2126,6 @@ async def sample_delete_feature(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/featurestore_service/client.py b/google/cloud/aiplatform_v1/services/featurestore_service/client.py index fe2e692329..60143748d3 100644 --- a/google/cloud/aiplatform_v1/services/featurestore_service/client.py +++ b/google/cloud/aiplatform_v1/services/featurestore_service/client.py @@ -1059,9 +1059,6 @@ def sample_delete_featurestore(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1669,9 +1666,6 @@ def sample_delete_entity_type(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2398,9 +2392,6 @@ def sample_delete_feature(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/index_endpoint_service/async_client.py b/google/cloud/aiplatform_v1/services/index_endpoint_service/async_client.py index 9071bce789..d064474ce5 100644 --- a/google/cloud/aiplatform_v1/services/index_endpoint_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/index_endpoint_service/async_client.py @@ -727,9 +727,6 @@ async def sample_delete_index_endpoint(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/index_endpoint_service/client.py b/google/cloud/aiplatform_v1/services/index_endpoint_service/client.py index 2316e23fac..58682a530f 100644 --- a/google/cloud/aiplatform_v1/services/index_endpoint_service/client.py +++ b/google/cloud/aiplatform_v1/services/index_endpoint_service/client.py @@ -969,9 +969,6 @@ def sample_delete_index_endpoint(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/index_service/async_client.py b/google/cloud/aiplatform_v1/services/index_service/async_client.py index 6d40221fd9..4e6e816a3d 100644 --- a/google/cloud/aiplatform_v1/services/index_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/index_service/async_client.py @@ -742,9 +742,6 @@ async def sample_delete_index(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/index_service/client.py b/google/cloud/aiplatform_v1/services/index_service/client.py index 7caaf32b58..2998fc75e3 100644 --- a/google/cloud/aiplatform_v1/services/index_service/client.py +++ b/google/cloud/aiplatform_v1/services/index_service/client.py @@ -987,9 +987,6 @@ def sample_delete_index(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/job_service/async_client.py b/google/cloud/aiplatform_v1/services/job_service/async_client.py index 40dde742ee..ba1c6d18d8 100644 --- a/google/cloud/aiplatform_v1/services/job_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/job_service/async_client.py @@ -661,9 +661,6 @@ async def sample_delete_custom_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1196,9 +1193,6 @@ async def sample_delete_data_labeling_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1728,9 +1722,6 @@ async def sample_delete_hyperparameter_tuning_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2275,9 +2266,6 @@ async def sample_delete_batch_prediction_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -3112,9 +3100,6 @@ async def sample_delete_model_deployment_monitoring_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/job_service/client.py b/google/cloud/aiplatform_v1/services/job_service/client.py index 5db58126cb..e0c23f0eda 100644 --- a/google/cloud/aiplatform_v1/services/job_service/client.py +++ b/google/cloud/aiplatform_v1/services/job_service/client.py @@ -1079,9 +1079,6 @@ def sample_delete_custom_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1614,9 +1611,6 @@ def sample_delete_data_labeling_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have @@ -2152,9 +2146,6 @@ def sample_delete_hyperparameter_tuning_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2707,9 +2698,6 @@ def sample_delete_batch_prediction_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -3572,9 +3560,6 @@ def sample_delete_model_deployment_monitoring_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/metadata_service/async_client.py b/google/cloud/aiplatform_v1/services/metadata_service/async_client.py index 27be98a4c5..7ed573919a 100644 --- a/google/cloud/aiplatform_v1/services/metadata_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/metadata_service/async_client.py @@ -651,9 +651,6 @@ async def sample_delete_metadata_store(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1205,9 +1202,6 @@ async def sample_delete_artifact(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1871,9 +1865,6 @@ async def sample_delete_context(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2882,9 +2873,6 @@ async def sample_delete_execution(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/metadata_service/client.py b/google/cloud/aiplatform_v1/services/metadata_service/client.py index 367809f5f5..49d335551d 100644 --- a/google/cloud/aiplatform_v1/services/metadata_service/client.py +++ b/google/cloud/aiplatform_v1/services/metadata_service/client.py @@ -960,9 +960,6 @@ def sample_delete_metadata_store(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1514,9 +1511,6 @@ def sample_delete_artifact(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2180,9 +2174,6 @@ def sample_delete_context(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -3197,9 +3188,6 @@ def sample_delete_execution(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/model_service/async_client.py b/google/cloud/aiplatform_v1/services/model_service/async_client.py index 8f5374a495..0fd9729cce 100644 --- a/google/cloud/aiplatform_v1/services/model_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/model_service/async_client.py @@ -883,9 +883,6 @@ async def sample_delete_model(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1012,9 +1009,6 @@ async def sample_delete_model_version(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/model_service/client.py b/google/cloud/aiplatform_v1/services/model_service/client.py index 7359d8e36f..9df3e08c34 100644 --- a/google/cloud/aiplatform_v1/services/model_service/client.py +++ b/google/cloud/aiplatform_v1/services/model_service/client.py @@ -1188,9 +1188,6 @@ def sample_delete_model(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1317,9 +1314,6 @@ def sample_delete_model_version(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/pipeline_service/async_client.py b/google/cloud/aiplatform_v1/services/pipeline_service/async_client.py index 1536c04942..335df2f723 100644 --- a/google/cloud/aiplatform_v1/services/pipeline_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/pipeline_service/async_client.py @@ -637,9 +637,6 @@ async def sample_delete_training_pipeline(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1177,9 +1174,6 @@ async def sample_delete_pipeline_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/pipeline_service/client.py b/google/cloud/aiplatform_v1/services/pipeline_service/client.py index 6279085a8b..efa1096653 100644 --- a/google/cloud/aiplatform_v1/services/pipeline_service/client.py +++ b/google/cloud/aiplatform_v1/services/pipeline_service/client.py @@ -1021,9 +1021,6 @@ def sample_delete_training_pipeline(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1561,9 +1558,6 @@ def sample_delete_pipeline_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/specialist_pool_service/async_client.py b/google/cloud/aiplatform_v1/services/specialist_pool_service/async_client.py index 97ebd096bc..4e6441f17f 100644 --- a/google/cloud/aiplatform_v1/services/specialist_pool_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/specialist_pool_service/async_client.py @@ -644,9 +644,6 @@ async def sample_delete_specialist_pool(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/specialist_pool_service/client.py b/google/cloud/aiplatform_v1/services/specialist_pool_service/client.py index 7741b02deb..c33409cd91 100644 --- a/google/cloud/aiplatform_v1/services/specialist_pool_service/client.py +++ b/google/cloud/aiplatform_v1/services/specialist_pool_service/client.py @@ -862,9 +862,6 @@ def sample_delete_specialist_pool(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/tensorboard_service/async_client.py b/google/cloud/aiplatform_v1/services/tensorboard_service/async_client.py index 46eb6e7858..62cf4fc67d 100644 --- a/google/cloud/aiplatform_v1/services/tensorboard_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/tensorboard_service/async_client.py @@ -786,9 +786,6 @@ async def sample_delete_tensorboard(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1363,9 +1360,6 @@ async def sample_delete_tensorboard_experiment(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2056,9 +2050,6 @@ async def sample_delete_tensorboard_run(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2750,9 +2741,6 @@ async def sample_delete_tensorboard_time_series(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/services/tensorboard_service/client.py b/google/cloud/aiplatform_v1/services/tensorboard_service/client.py index b99f8c85c2..8802aeb4b1 100644 --- a/google/cloud/aiplatform_v1/services/tensorboard_service/client.py +++ b/google/cloud/aiplatform_v1/services/tensorboard_service/client.py @@ -1062,9 +1062,6 @@ def sample_delete_tensorboard(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1653,9 +1650,6 @@ def sample_delete_tensorboard_experiment(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2354,9 +2348,6 @@ def sample_delete_tensorboard_run(): } - The JSON representation for Empty is empty JSON - object {}. 
- """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -3068,9 +3059,6 @@ def sample_delete_tensorboard_time_series(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1/types/custom_job.py b/google/cloud/aiplatform_v1/types/custom_job.py index 1b269d4fc2..2b33bbf119 100644 --- a/google/cloud/aiplatform_v1/types/custom_job.py +++ b/google/cloud/aiplatform_v1/types/custom_job.py @@ -165,7 +165,7 @@ class CustomJob(proto.Message): class CustomJobSpec(proto.Message): r"""Represents the spec of a CustomJob. - Next Id: 14 + Next Id: 15 Attributes: worker_pool_specs (Sequence[google.cloud.aiplatform_v1.types.WorkerPoolSpec]): @@ -183,7 +183,7 @@ class CustomJobSpec(proto.Message): Agent `__ for the CustomJob's project is used. network (str): - The full name of the Compute Engine + Optional. The full name of the Compute Engine `network `__ to which the Job should be peered. For example, ``projects/12345/global/networks/myVPC``. diff --git a/google/cloud/aiplatform_v1/types/dataset_service.py b/google/cloud/aiplatform_v1/types/dataset_service.py index 1ce61ae9cd..63f9191a9b 100644 --- a/google/cloud/aiplatform_v1/types/dataset_service.py +++ b/google/cloud/aiplatform_v1/types/dataset_service.py @@ -499,7 +499,7 @@ class ListSavedQueriesResponse(proto.Message): Attributes: saved_queries (Sequence[google.cloud.aiplatform_v1.types.SavedQuery]): - A list of SavedQueries that match the + A list of SavedQueries that matches the specified filter in the request. next_page_token (str): The standard List next-page token. diff --git a/google/cloud/aiplatform_v1/types/job_service.py b/google/cloud/aiplatform_v1/types/job_service.py index 483c046086..7d2e5140d0 100644 --- a/google/cloud/aiplatform_v1/types/job_service.py +++ b/google/cloud/aiplatform_v1/types/job_service.py @@ -127,19 +127,19 @@ class ListCustomJobsRequest(proto.Message): Supported fields: - - ``display_name`` supports = and !=. - - - ``state`` supports = and !=. + - ``display_name`` supports ``=``, ``!=`` comparisons, and + ``:`` wildcard. + - ``state`` supports ``=``, ``!=`` comparisons. + - ``create_time`` supports ``=``, ``!=``,\ ``<``, + ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must + be in RFC 3339 format. Some examples of using the filter are: - - ``state="JOB_STATE_SUCCEEDED" AND display_name="my_job"`` - - - ``state="JOB_STATE_RUNNING" OR display_name="my_job"`` - + - ``state="JOB_STATE_SUCCEEDED" AND display_name:"my_job_*"`` + - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - - - ``state="JOB_STATE_FAILED"`` + - ``create_time>"2021-05-18T00:00:00Z"`` page_size (int): The standard list page size. page_token (str): @@ -288,19 +288,19 @@ class ListDataLabelingJobsRequest(proto.Message): Supported fields: - - ``display_name`` supports = and !=. - - - ``state`` supports = and !=. + - ``display_name`` supports ``=``, ``!=`` comparisons, and + ``:`` wildcard. + - ``state`` supports ``=``, ``!=`` comparisons. + - ``create_time`` supports ``=``, ``!=``,\ ``<``, + ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must + be in RFC 3339 format. 
Some examples of using the filter are: - - ``state="JOB_STATE_SUCCEEDED" AND display_name="my_job"`` - - - ``state="JOB_STATE_RUNNING" OR display_name="my_job"`` - + - ``state="JOB_STATE_SUCCEEDED" AND display_name:"my_job_*"`` + - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - - - ``state="JOB_STATE_FAILED"`` + - ``create_time>"2021-05-18T00:00:00Z"`` page_size (int): The standard list page size. page_token (str): @@ -460,19 +460,19 @@ class ListHyperparameterTuningJobsRequest(proto.Message): Supported fields: - - ``display_name`` supports = and !=. - - - ``state`` supports = and !=. + - ``display_name`` supports ``=``, ``!=`` comparisons, and + ``:`` wildcard. + - ``state`` supports ``=``, ``!=`` comparisons. + - ``create_time`` supports ``=``, ``!=``,\ ``<``, + ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must + be in RFC 3339 format. Some examples of using the filter are: - - ``state="JOB_STATE_SUCCEEDED" AND display_name="my_job"`` - - - ``state="JOB_STATE_RUNNING" OR display_name="my_job"`` - + - ``state="JOB_STATE_SUCCEEDED" AND display_name:"my_job_*"`` + - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - - - ``state="JOB_STATE_FAILED"`` + - ``create_time>"2021-05-18T00:00:00Z"`` page_size (int): The standard list page size. page_token (str): @@ -627,21 +627,21 @@ class ListBatchPredictionJobsRequest(proto.Message): Supported fields: - - ``display_name`` supports = and !=. - - - ``state`` supports = and !=. - - - ``model_display_name`` supports = and != + - ``display_name`` supports ``=``, ``!=`` comparisons, and + ``:`` wildcard. + - ``model_display_name`` supports ``=``, ``!=`` + comparisons. + - ``state`` supports ``=``, ``!=`` comparisons. + - ``create_time`` supports ``=``, ``!=``,\ ``<``, + ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must + be in RFC 3339 format. Some examples of using the filter are: - - ``state="JOB_STATE_SUCCEEDED" AND display_name="my_job"`` - - - ``state="JOB_STATE_RUNNING" OR display_name="my_job"`` - + - ``state="JOB_STATE_SUCCEEDED" AND display_name:"my_job_*"`` + - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - - - ``state="JOB_STATE_FAILED"`` + - ``create_time>"2021-05-18T00:00:00Z"`` page_size (int): The standard list page size. page_token (str): @@ -922,6 +922,22 @@ class ListModelDeploymentMonitoringJobsRequest(proto.Message): Format: ``projects/{project}/locations/{location}`` filter (str): The standard list filter. + + Supported fields: + + - ``display_name`` supports ``=``, ``!=`` comparisons, and + ``:`` wildcard. + - ``state`` supports ``=``, ``!=`` comparisons. + - ``create_time`` supports ``=``, ``!=``,\ ``<``, + ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must + be in RFC 3339 format. + + Some examples of using the filter are: + + - ``state="JOB_STATE_SUCCEEDED" AND display_name:"my_job_*"`` + - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` + - ``NOT display_name="my_job"`` + - ``create_time>"2021-05-18T00:00:00Z"`` page_size (int): The standard list page size. 
page_token (str): diff --git a/google/cloud/aiplatform_v1/types/model.py b/google/cloud/aiplatform_v1/types/model.py index 8c3ca651c5..0dbb2ad510 100644 --- a/google/cloud/aiplatform_v1/types/model.py +++ b/google/cloud/aiplatform_v1/types/model.py @@ -281,6 +281,7 @@ class DeploymentResourcesType(proto.Enum): DEPLOYMENT_RESOURCES_TYPE_UNSPECIFIED = 0 DEDICATED_RESOURCES = 1 AUTOMATIC_RESOURCES = 2 + SHARED_RESOURCES = 3 class ExportFormat(proto.Message): r"""Represents export format supported by the Model. diff --git a/google/cloud/aiplatform_v1/types/pipeline_service.py b/google/cloud/aiplatform_v1/types/pipeline_service.py index 4e197a08e1..ae71535469 100644 --- a/google/cloud/aiplatform_v1/types/pipeline_service.py +++ b/google/cloud/aiplatform_v1/types/pipeline_service.py @@ -89,21 +89,26 @@ class ListTrainingPipelinesRequest(proto.Message): TrainingPipelines from. Format: ``projects/{project}/locations/{location}`` filter (str): - The standard list filter. Supported fields: + The standard list filter. - - ``display_name`` supports = and !=. + Supported fields: - - ``state`` supports = and !=. + - ``display_name`` supports ``=``, ``!=`` comparisons, and + ``:`` wildcard. + - ``state`` supports ``=``, ``!=`` comparisons. + - ``training_task_definition`` supports ``=``, ``!=`` comparisons, + and ``:`` wildcard. + - ``create_time`` supports ``=``, ``!=``,\ ``<``, + ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must + be in RFC 3339 format. Some examples of using the filter are: - - ``state="PIPELINE_STATE_SUCCEEDED" AND display_name="my_pipeline"`` - - - ``state="PIPELINE_STATE_RUNNING" OR display_name="my_pipeline"`` - + - ``state="PIPELINE_STATE_SUCCEEDED" AND display_name:"my_pipeline_*"`` + - ``state!="PIPELINE_STATE_FAILED" OR display_name="my_pipeline"`` - ``NOT display_name="my_pipeline"`` - - - ``state="PIPELINE_STATE_FAILED"`` + - ``create_time>"2021-05-18T00:00:00Z"`` + - ``training_task_definition:"*automl_text_classification*"`` page_size (int): The standard list page size. page_token (str): diff --git a/google/cloud/aiplatform_v1/types/saved_query.py b/google/cloud/aiplatform_v1/types/saved_query.py index 0afba76023..814ff195fb 100644 --- a/google/cloud/aiplatform_v1/types/saved_query.py +++ b/google/cloud/aiplatform_v1/types/saved_query.py @@ -67,9 +67,9 @@ class SavedQuery(proto.Message): Output only. Number of AnnotationSpecs in the context of the SavedQuery. etag (str): - Used to perform a consistent - read-modify-write update. If not set, a blind - "overwrite" update happens. + Used to perform consistent read-modify-write + updates. If not set, a blind "overwrite" update + happens. support_automl_training (bool): Output only. If the Annotations belonging to the SavedQuery can be used for AutoML training. diff --git a/google/cloud/aiplatform_v1/types/study.py b/google/cloud/aiplatform_v1/types/study.py index 329b499d5d..b76e693320 100644 --- a/google/cloud/aiplatform_v1/types/study.py +++ b/google/cloud/aiplatform_v1/types/study.py @@ -285,8 +285,8 @@ class StudySpec(proto.Message): observation_noise (google.cloud.aiplatform_v1.types.StudySpec.ObservationNoise): The observation noise level of the study. Currently only supported by the Vertex AI Vizier - service. Not supported by HyperparamterTuningJob - or TrainingPipeline. + service. Not supported by + HyperparameterTuningJob or TrainingPipeline.
measurement_selection_type (google.cloud.aiplatform_v1.types.StudySpec.MeasurementSelectionType): Describe which measurement selection type will be used @@ -419,7 +419,8 @@ class DoubleValueSpec(proto.Message): that there is no offered starting point. Currently only supported by the Vertex AI Vizier service. - Not supported by HyperparamterTuningJob or TrainingPipeline. + Not supported by HyperparameterTuningJob or + TrainingPipeline. This field is a member of `oneof`_ ``_default_value``. """ @@ -454,7 +455,8 @@ class IntegerValueSpec(proto.Message): that there is no offered starting point. Currently only supported by the Vertex AI Vizier service. - Not supported by HyperparamterTuningJob or TrainingPipeline. + Not supported by HyperparameterTuningJob or + TrainingPipeline. This field is a member of `oneof`_ ``_default_value``. """ @@ -484,8 +486,9 @@ class CategoricalValueSpec(proto.Message): assumed to be a relatively good starting point. Unset value signals that there is no offered starting point. - Currently only supported by the Vizier service. Not - supported by HyperparamterTuningJob or TrainingPipeline. + Currently only supported by the Vertex AI Vizier service. + Not supported by HyperparameterTuningJob or + TrainingPipeline. This field is a member of `oneof`_ ``_default_value``. """ @@ -517,8 +520,9 @@ class DiscreteValueSpec(proto.Message): that there is no offered starting point. It automatically rounds to the nearest feasible discrete point. - Currently only supported by the Vizier service. Not - supported by HyperparamterTuningJob or TrainingPipeline. + Currently only supported by the Vertex AI Vizier service. + Not supported by HyperparameterTuningJob or + TrainingPipeline. This field is a member of `oneof`_ ``_default_value``. """ diff --git a/google/cloud/aiplatform_v1/types/types.py b/google/cloud/aiplatform_v1/types/types.py index 6e660c00ef..87bb3c178e 100644 --- a/google/cloud/aiplatform_v1/types/types.py +++ b/google/cloud/aiplatform_v1/types/types.py @@ -46,7 +46,7 @@ class DoubleArray(proto.Message): Attributes: values (Sequence[float]): - A list of bool values. + A list of double values. 
""" values = proto.RepeatedField( diff --git a/google/cloud/aiplatform_v1beta1/__init__.py b/google/cloud/aiplatform_v1beta1/__init__.py index 28c4e862f9..ccb19b424c 100644 --- a/google/cloud/aiplatform_v1beta1/__init__.py +++ b/google/cloud/aiplatform_v1beta1/__init__.py @@ -16,6 +16,12 @@ from .services.dataset_service import DatasetServiceClient from .services.dataset_service import DatasetServiceAsyncClient +from .services.deployment_resource_pool_service import ( + DeploymentResourcePoolServiceClient, +) +from .services.deployment_resource_pool_service import ( + DeploymentResourcePoolServiceAsyncClient, +) from .services.endpoint_service import EndpointServiceClient from .services.endpoint_service import EndpointServiceAsyncClient from .services.featurestore_online_serving_service import ( @@ -92,6 +98,20 @@ from .types.dataset_service import UpdateDatasetRequest from .types.deployed_index_ref import DeployedIndexRef from .types.deployed_model_ref import DeployedModelRef +from .types.deployment_resource_pool import DeploymentResourcePool +from .types.deployment_resource_pool_service import ( + CreateDeploymentResourcePoolOperationMetadata, +) +from .types.deployment_resource_pool_service import CreateDeploymentResourcePoolRequest +from .types.deployment_resource_pool_service import DeleteDeploymentResourcePoolRequest +from .types.deployment_resource_pool_service import GetDeploymentResourcePoolRequest +from .types.deployment_resource_pool_service import ListDeploymentResourcePoolsRequest +from .types.deployment_resource_pool_service import ListDeploymentResourcePoolsResponse +from .types.deployment_resource_pool_service import QueryDeployedModelsRequest +from .types.deployment_resource_pool_service import QueryDeployedModelsResponse +from .types.deployment_resource_pool_service import ( + UpdateDeploymentResourcePoolOperationMetadata, +) from .types.encryption_spec import EncryptionSpec from .types.endpoint import DeployedModel from .types.endpoint import Endpoint @@ -508,6 +528,7 @@ __all__ = ( "DatasetServiceAsyncClient", + "DeploymentResourcePoolServiceAsyncClient", "EndpointServiceAsyncClient", "FeaturestoreOnlineServingServiceAsyncClient", "FeaturestoreServiceAsyncClient", @@ -582,6 +603,8 @@ "CreateDataLabelingJobRequest", "CreateDatasetOperationMetadata", "CreateDatasetRequest", + "CreateDeploymentResourcePoolOperationMetadata", + "CreateDeploymentResourcePoolRequest", "CreateEndpointOperationMetadata", "CreateEndpointRequest", "CreateEntityTypeOperationMetadata", @@ -626,6 +649,7 @@ "DeleteCustomJobRequest", "DeleteDataLabelingJobRequest", "DeleteDatasetRequest", + "DeleteDeploymentResourcePoolRequest", "DeleteEndpointRequest", "DeleteEntityTypeRequest", "DeleteExecutionRequest", @@ -660,6 +684,8 @@ "DeployedIndexRef", "DeployedModel", "DeployedModelRef", + "DeploymentResourcePool", + "DeploymentResourcePoolServiceClient", "DestinationFeatureSetting", "DiskSpec", "DoubleArray", @@ -716,6 +742,7 @@ "GetCustomJobRequest", "GetDataLabelingJobRequest", "GetDatasetRequest", + "GetDeploymentResourcePoolRequest", "GetEndpointRequest", "GetEntityTypeRequest", "GetExecutionRequest", @@ -776,6 +803,8 @@ "ListDataLabelingJobsResponse", "ListDatasetsRequest", "ListDatasetsResponse", + "ListDeploymentResourcePoolsRequest", + "ListDeploymentResourcePoolsResponse", "ListEndpointsRequest", "ListEndpointsResponse", "ListEntityTypesRequest", @@ -891,6 +920,8 @@ "PythonPackageSpec", "QueryArtifactLineageSubgraphRequest", "QueryContextLineageSubgraphRequest", + "QueryDeployedModelsRequest", + 
"QueryDeployedModelsResponse", "QueryExecutionInputsAndOutputsRequest", "RawPredictRequest", "ReadFeatureValuesRequest", @@ -951,6 +982,7 @@ "UpdateArtifactRequest", "UpdateContextRequest", "UpdateDatasetRequest", + "UpdateDeploymentResourcePoolOperationMetadata", "UpdateEndpointRequest", "UpdateEntityTypeRequest", "UpdateExecutionRequest", diff --git a/google/cloud/aiplatform_v1beta1/gapic_metadata.json b/google/cloud/aiplatform_v1beta1/gapic_metadata.json index 485d4c41c8..13a15e75c7 100644 --- a/google/cloud/aiplatform_v1beta1/gapic_metadata.json +++ b/google/cloud/aiplatform_v1beta1/gapic_metadata.json @@ -129,6 +129,70 @@ } } }, + "DeploymentResourcePoolService": { + "clients": { + "grpc": { + "libraryClient": "DeploymentResourcePoolServiceClient", + "rpcs": { + "CreateDeploymentResourcePool": { + "methods": [ + "create_deployment_resource_pool" + ] + }, + "DeleteDeploymentResourcePool": { + "methods": [ + "delete_deployment_resource_pool" + ] + }, + "GetDeploymentResourcePool": { + "methods": [ + "get_deployment_resource_pool" + ] + }, + "ListDeploymentResourcePools": { + "methods": [ + "list_deployment_resource_pools" + ] + }, + "QueryDeployedModels": { + "methods": [ + "query_deployed_models" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DeploymentResourcePoolServiceAsyncClient", + "rpcs": { + "CreateDeploymentResourcePool": { + "methods": [ + "create_deployment_resource_pool" + ] + }, + "DeleteDeploymentResourcePool": { + "methods": [ + "delete_deployment_resource_pool" + ] + }, + "GetDeploymentResourcePool": { + "methods": [ + "get_deployment_resource_pool" + ] + }, + "ListDeploymentResourcePools": { + "methods": [ + "list_deployment_resource_pools" + ] + }, + "QueryDeployedModels": { + "methods": [ + "query_deployed_models" + ] + } + } + } + } + }, "EndpointService": { "clients": { "grpc": { diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py index 34efdf2476..2ce17e6833 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py @@ -745,9 +745,6 @@ async def sample_delete_dataset(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py index d652e1348b..63d9e3bf38 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py @@ -1058,9 +1058,6 @@ def sample_delete_dataset(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/__init__.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/__init__.py new file mode 100644 index 0000000000..11601f1ec9 --- /dev/null +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DeploymentResourcePoolServiceClient +from .async_client import DeploymentResourcePoolServiceAsyncClient + +__all__ = ( + "DeploymentResourcePoolServiceClient", + "DeploymentResourcePoolServiceAsyncClient", +) diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/async_client.py new file mode 100644 index 0000000000..43a4712437 --- /dev/null +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/async_client.py @@ -0,0 +1,1529 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation as gac_operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool +from google.cloud.aiplatform_v1beta1.types import ( + deployment_resource_pool as gca_deployment_resource_pool, +) +from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.aiplatform_v1beta1.types import endpoint +from google.cloud.aiplatform_v1beta1.types import machine_resources +from google.cloud.aiplatform_v1beta1.types import operation as gca_operation +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DeploymentResourcePoolServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DeploymentResourcePoolServiceGrpcAsyncIOTransport +from .client 
import DeploymentResourcePoolServiceClient + + +class DeploymentResourcePoolServiceAsyncClient: + """A service that manages the DeploymentResourcePool resource.""" + + _client: DeploymentResourcePoolServiceClient + + DEFAULT_ENDPOINT = DeploymentResourcePoolServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DeploymentResourcePoolServiceClient.DEFAULT_MTLS_ENDPOINT + + deployment_resource_pool_path = staticmethod( + DeploymentResourcePoolServiceClient.deployment_resource_pool_path + ) + parse_deployment_resource_pool_path = staticmethod( + DeploymentResourcePoolServiceClient.parse_deployment_resource_pool_path + ) + endpoint_path = staticmethod(DeploymentResourcePoolServiceClient.endpoint_path) + parse_endpoint_path = staticmethod( + DeploymentResourcePoolServiceClient.parse_endpoint_path + ) + model_path = staticmethod(DeploymentResourcePoolServiceClient.model_path) + parse_model_path = staticmethod( + DeploymentResourcePoolServiceClient.parse_model_path + ) + common_billing_account_path = staticmethod( + DeploymentResourcePoolServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DeploymentResourcePoolServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + DeploymentResourcePoolServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + DeploymentResourcePoolServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + DeploymentResourcePoolServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DeploymentResourcePoolServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + DeploymentResourcePoolServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + DeploymentResourcePoolServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + DeploymentResourcePoolServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + DeploymentResourcePoolServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DeploymentResourcePoolServiceAsyncClient: The constructed client. + """ + return DeploymentResourcePoolServiceClient.from_service_account_info.__func__(DeploymentResourcePoolServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DeploymentResourcePoolServiceAsyncClient: The constructed client. + """ + return DeploymentResourcePoolServiceClient.from_service_account_file.__func__(DeploymentResourcePoolServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DeploymentResourcePoolServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DeploymentResourcePoolServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DeploymentResourcePoolServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(DeploymentResourcePoolServiceClient).get_transport_class, + type(DeploymentResourcePoolServiceClient), + ) + + def __init__( + self, + *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, DeploymentResourcePoolServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the deployment resource pool service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DeploymentResourcePoolServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present.
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DeploymentResourcePoolServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_deployment_resource_pool( + self, + request: Union[ + deployment_resource_pool_service.CreateDeploymentResourcePoolRequest, dict + ] = None, + *, + parent: str = None, + deployment_resource_pool: gca_deployment_resource_pool.DeploymentResourcePool = None, + deployment_resource_pool_id: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a DeploymentResourcePool. + + .. code-block:: python + + from google.cloud import aiplatform_v1beta1 + + async def sample_create_deployment_resource_pool(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient() + + # Initialize request argument(s) + deployment_resource_pool = aiplatform_v1beta1.DeploymentResourcePool() + deployment_resource_pool.dedicated_resources.min_replica_count = 1803 + + request = aiplatform_v1beta1.CreateDeploymentResourcePoolRequest( + parent="parent_value", + deployment_resource_pool=deployment_resource_pool, + deployment_resource_pool_id="deployment_resource_pool_id_value", + ) + + # Make the request + operation = client.create_deployment_resource_pool(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.aiplatform_v1beta1.types.CreateDeploymentResourcePoolRequest, dict]): + The request object. Request message for + CreateDeploymentResourcePool method. + parent (:class:`str`): + Required. The parent location + resource where this + DeploymentResourcePool will be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment_resource_pool (:class:`google.cloud.aiplatform_v1beta1.types.DeploymentResourcePool`): + Required. The DeploymentResourcePool + to create. + + This corresponds to the ``deployment_resource_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment_resource_pool_id (:class:`str`): + Required. The ID to use for the DeploymentResourcePool, + which will become the final component of the + DeploymentResourcePool's resource name. + + The maximum length is 63 characters, and valid + characters are ``/^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$/``. + + This corresponds to the ``deployment_resource_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.DeploymentResourcePool` A description of resources that can be shared by multiple DeployedModels, + whose underlying specification consists of a + DedicatedResources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, deployment_resource_pool, deployment_resource_pool_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = deployment_resource_pool_service.CreateDeploymentResourcePoolRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deployment_resource_pool is not None: + request.deployment_resource_pool = deployment_resource_pool + if deployment_resource_pool_id is not None: + request.deployment_resource_pool_id = deployment_resource_pool_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_deployment_resource_pool, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gca_deployment_resource_pool.DeploymentResourcePool, + metadata_type=deployment_resource_pool_service.CreateDeploymentResourcePoolOperationMetadata, + ) + + # Done; return the response. + return response + + async def get_deployment_resource_pool( + self, + request: Union[ + deployment_resource_pool_service.GetDeploymentResourcePoolRequest, dict + ] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> deployment_resource_pool.DeploymentResourcePool: + r"""Get a DeploymentResourcePool. + + .. code-block:: python + + from google.cloud import aiplatform_v1beta1 + + async def sample_get_deployment_resource_pool(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.GetDeploymentResourcePoolRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deployment_resource_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.aiplatform_v1beta1.types.GetDeploymentResourcePoolRequest, dict]): + The request object. Request message for + GetDeploymentResourcePool method. + name (:class:`str`): + Required. The name of the DeploymentResourcePool to + retrieve. Format: + projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.aiplatform_v1beta1.types.DeploymentResourcePool: + A description of resources that can + be shared by multiple DeployedModels, + whose underlying specification consists + of a DedicatedResources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = deployment_resource_pool_service.GetDeploymentResourcePoolRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_deployment_resource_pool, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_deployment_resource_pools( + self, + request: Union[ + deployment_resource_pool_service.ListDeploymentResourcePoolsRequest, dict + ] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeploymentResourcePoolsAsyncPager: + r"""List DeploymentResourcePools in a location. + + .. code-block:: python + + from google.cloud import aiplatform_v1beta1 + + async def sample_list_deployment_resource_pools(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.ListDeploymentResourcePoolsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployment_resource_pools(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Union[google.cloud.aiplatform_v1beta1.types.ListDeploymentResourcePoolsRequest, dict]): + The request object. Request message for + ListDeploymentResourcePools method. + parent (:class:`str`): + Required. The parent Location which + owns this collection of + DeploymentResourcePools. Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service.pagers.ListDeploymentResourcePoolsAsyncPager: + Response message for + ListDeploymentResourcePools method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = deployment_resource_pool_service.ListDeploymentResourcePoolsRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_deployment_resource_pools, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDeploymentResourcePoolsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_deployment_resource_pool( + self, + request: Union[ + deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest, dict + ] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Delete a DeploymentResourcePool. + + .. code-block:: python + + from google.cloud import aiplatform_v1beta1 + + async def sample_delete_deployment_resource_pool(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.DeleteDeploymentResourcePoolRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deployment_resource_pool(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.aiplatform_v1beta1.types.DeleteDeploymentResourcePoolRequest, dict]): + The request object. Request message for + DeleteDeploymentResourcePool method. + name (:class:`str`): + Required. The name of the DeploymentResourcePool to + delete. Format: + projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_deployment_resource_pool, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=gca_operation.DeleteOperationMetadata, + ) + + # Done; return the response. + return response + + async def query_deployed_models( + self, + request: Union[ + deployment_resource_pool_service.QueryDeployedModelsRequest, dict + ] = None, + *, + deployment_resource_pool: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.QueryDeployedModelsAsyncPager: + r"""List DeployedModels that have been deployed on this + DeploymentResourcePool. + + .. code-block:: python + + from google.cloud import aiplatform_v1beta1 + + async def sample_query_deployed_models(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.QueryDeployedModelsRequest( + deployment_resource_pool="deployment_resource_pool_value", + ) + + # Make the request + page_result = client.query_deployed_models(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Union[google.cloud.aiplatform_v1beta1.types.QueryDeployedModelsRequest, dict]): + The request object. Request message for + QueryDeployedModels method. + deployment_resource_pool (:class:`str`): + Required. The name of the target DeploymentResourcePool + to query. Format: + projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool} + + This corresponds to the ``deployment_resource_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service.pagers.QueryDeployedModelsAsyncPager: + Response message for + QueryDeployedModels method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([deployment_resource_pool]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = deployment_resource_pool_service.QueryDeployedModelsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deployment_resource_pool is not None: + request.deployment_resource_pool = deployment_resource_pool + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.query_deployed_models, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deployment_resource_pool", request.deployment_resource_pool),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.QueryDeployedModelsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: operations_pb2.ListOperationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: operations_pb2.GetOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: operations_pb2.DeleteOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: operations_pb2.CancelOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def wait_operation( + self, + request: operations_pb2.WaitOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.wait_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: locations_pb2.GetLocationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: locations_pb2.ListLocationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-aiplatform", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DeploymentResourcePoolServiceAsyncClient",) diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/client.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/client.py new file mode 100644 index 0000000000..fe13aa7e5c --- /dev/null +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/client.py @@ -0,0 +1,1818 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
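+#
+# A minimal usage sketch for the new synchronous client introduced in this file.
+# This is illustrative only; the project, location, and pool IDs below are
+# placeholder values, not values taken from this change.
+#
+#     from google.cloud import aiplatform_v1beta1
+#
+#     client = aiplatform_v1beta1.DeploymentResourcePoolServiceClient()
+#     pool = client.get_deployment_resource_pool(
+#         name="projects/my-project/locations/us-central1/deploymentResourcePools/my-pool"
+#     )
+#     print(pool.dedicated_resources.min_replica_count)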
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation as gac_operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool +from google.cloud.aiplatform_v1beta1.types import ( + deployment_resource_pool as gca_deployment_resource_pool, +) +from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.aiplatform_v1beta1.types import endpoint +from google.cloud.aiplatform_v1beta1.types import machine_resources +from google.cloud.aiplatform_v1beta1.types import operation as gca_operation +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DeploymentResourcePoolServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DeploymentResourcePoolServiceGrpcTransport +from .transports.grpc_asyncio import DeploymentResourcePoolServiceGrpcAsyncIOTransport + + +class DeploymentResourcePoolServiceClientMeta(type): + """Metaclass for the DeploymentResourcePoolService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DeploymentResourcePoolServiceTransport]] + _transport_registry["grpc"] = DeploymentResourcePoolServiceGrpcTransport + _transport_registry[ + "grpc_asyncio" + ] = DeploymentResourcePoolServiceGrpcAsyncIOTransport + + def get_transport_class( + cls, + label: str = None, + ) -> Type[DeploymentResourcePoolServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
+        return next(iter(cls._transport_registry.values()))
+
+
+class DeploymentResourcePoolServiceClient(
+    metaclass=DeploymentResourcePoolServiceClientMeta
+):
+    """A service that manages the DeploymentResourcePool resource."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "aiplatform.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DeploymentResourcePoolServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DeploymentResourcePoolServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> DeploymentResourcePoolServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DeploymentResourcePoolServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def deployment_resource_pool_path(
+        project: str,
+        location: str,
+        deployment_resource_pool: str,
+    ) -> str:
+        """Returns a fully-qualified deployment_resource_pool string."""
+        return "projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool}".format(
+            project=project,
+            location=location,
+            deployment_resource_pool=deployment_resource_pool,
+        )
+
+    @staticmethod
+    def parse_deployment_resource_pool_path(path: str) -> Dict[str, str]:
+        """Parses a deployment_resource_pool path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/deploymentResourcePools/(?P<deployment_resource_pool>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def endpoint_path(
+        project: str,
+        location: str,
+        endpoint: str,
+    ) -> str:
+        """Returns a fully-qualified endpoint string."""
+        return "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
+            project=project,
+            location=location,
+            endpoint=endpoint,
+        )
+
+    @staticmethod
+    def parse_endpoint_path(path: str) -> Dict[str, str]:
+        """Parses an endpoint path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/endpoints/(?P<endpoint>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def model_path(
+        project: str,
+        location: str,
+        model: str,
+    ) -> str:
+        """Returns a fully-qualified model string."""
+        return "projects/{project}/locations/{location}/models/{model}".format(
+            project=project,
+            location=location,
+            model=model,
+        )
+
+    @staticmethod
+    def parse_model_path(path: str) -> Dict[str, str]:
+        """Parses a model path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/models/(?P<model>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if the `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
+        otherwise use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
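+        # (Summary of the order applied below: an explicit ``client_options.api_endpoint``
+        # always wins; otherwise the mTLS endpoint is used when GOOGLE_API_USE_MTLS_ENDPOINT
+        # is "always", or when it is "auto" and a client certificate source was found above.)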
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, DeploymentResourcePoolServiceTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the deployment resource pool service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DeploymentResourcePoolServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DeploymentResourcePoolServiceTransport): + # transport is a DeploymentResourcePoolServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_deployment_resource_pool( + self, + request: Union[ + deployment_resource_pool_service.CreateDeploymentResourcePoolRequest, dict + ] = None, + *, + parent: str = None, + deployment_resource_pool: gca_deployment_resource_pool.DeploymentResourcePool = None, + deployment_resource_pool_id: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gac_operation.Operation: + r"""Create a DeploymentResourcePool. + + .. code-block:: python + + from google.cloud import aiplatform_v1beta1 + + def sample_create_deployment_resource_pool(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceClient() + + # Initialize request argument(s) + deployment_resource_pool = aiplatform_v1beta1.DeploymentResourcePool() + deployment_resource_pool.dedicated_resources.min_replica_count = 1803 + + request = aiplatform_v1beta1.CreateDeploymentResourcePoolRequest( + parent="parent_value", + deployment_resource_pool=deployment_resource_pool, + deployment_resource_pool_id="deployment_resource_pool_id_value", + ) + + # Make the request + operation = client.create_deployment_resource_pool(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.aiplatform_v1beta1.types.CreateDeploymentResourcePoolRequest, dict]): + The request object. Request message for + CreateDeploymentResourcePool method. + parent (str): + Required. The parent location + resource where this + DeploymentResourcePool will be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment_resource_pool (google.cloud.aiplatform_v1beta1.types.DeploymentResourcePool): + Required. The DeploymentResourcePool + to create. + + This corresponds to the ``deployment_resource_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment_resource_pool_id (str): + Required. The ID to use for the DeploymentResourcePool, + which will become the final component of the + DeploymentResourcePool's resource name. + + The maximum length is 63 characters, and valid + characters are ``/^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$/``. + + This corresponds to the ``deployment_resource_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.DeploymentResourcePool` A description of resources that can be shared by multiple DeployedModels, + whose underlying specification consists of a + DedicatedResources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, deployment_resource_pool, deployment_resource_pool_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a deployment_resource_pool_service.CreateDeploymentResourcePoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, + deployment_resource_pool_service.CreateDeploymentResourcePoolRequest, + ): + request = ( + deployment_resource_pool_service.CreateDeploymentResourcePoolRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deployment_resource_pool is not None: + request.deployment_resource_pool = deployment_resource_pool + if deployment_resource_pool_id is not None: + request.deployment_resource_pool_id = deployment_resource_pool_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_deployment_resource_pool + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = gac_operation.from_gapic( + response, + self._transport.operations_client, + gca_deployment_resource_pool.DeploymentResourcePool, + metadata_type=deployment_resource_pool_service.CreateDeploymentResourcePoolOperationMetadata, + ) + + # Done; return the response. + return response + + def get_deployment_resource_pool( + self, + request: Union[ + deployment_resource_pool_service.GetDeploymentResourcePoolRequest, dict + ] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> deployment_resource_pool.DeploymentResourcePool: + r"""Get a DeploymentResourcePool. + + .. 
code-block:: python + + from google.cloud import aiplatform_v1beta1 + + def sample_get_deployment_resource_pool(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.GetDeploymentResourcePoolRequest( + name="name_value", + ) + + # Make the request + response = client.get_deployment_resource_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.aiplatform_v1beta1.types.GetDeploymentResourcePoolRequest, dict]): + The request object. Request message for + GetDeploymentResourcePool method. + name (str): + Required. The name of the DeploymentResourcePool to + retrieve. Format: + projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.aiplatform_v1beta1.types.DeploymentResourcePool: + A description of resources that can + be shared by multiple DeployedModels, + whose underlying specification consists + of a DedicatedResources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a deployment_resource_pool_service.GetDeploymentResourcePoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, deployment_resource_pool_service.GetDeploymentResourcePoolRequest + ): + request = deployment_resource_pool_service.GetDeploymentResourcePoolRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_deployment_resource_pool + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_deployment_resource_pools( + self, + request: Union[ + deployment_resource_pool_service.ListDeploymentResourcePoolsRequest, dict + ] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeploymentResourcePoolsPager: + r"""List DeploymentResourcePools in a location. + + .. 
code-block:: python + + from google.cloud import aiplatform_v1beta1 + + def sample_list_deployment_resource_pools(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.ListDeploymentResourcePoolsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployment_resource_pools(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.aiplatform_v1beta1.types.ListDeploymentResourcePoolsRequest, dict]): + The request object. Request message for + ListDeploymentResourcePools method. + parent (str): + Required. The parent Location which + owns this collection of + DeploymentResourcePools. Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service.pagers.ListDeploymentResourcePoolsPager: + Response message for + ListDeploymentResourcePools method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a deployment_resource_pool_service.ListDeploymentResourcePoolsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, deployment_resource_pool_service.ListDeploymentResourcePoolsRequest + ): + request = ( + deployment_resource_pool_service.ListDeploymentResourcePoolsRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_deployment_resource_pools + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeploymentResourcePoolsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_deployment_resource_pool( + self, + request: Union[ + deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest, dict + ] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gac_operation.Operation: + r"""Delete a DeploymentResourcePool. + + .. code-block:: python + + from google.cloud import aiplatform_v1beta1 + + def sample_delete_deployment_resource_pool(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.DeleteDeploymentResourcePoolRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deployment_resource_pool(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.aiplatform_v1beta1.types.DeleteDeploymentResourcePoolRequest, dict]): + The request object. Request message for + DeleteDeploymentResourcePool method. + name (str): + Required. The name of the DeploymentResourcePool to + delete. Format: + projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, + deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest, + ): + request = ( + deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_deployment_resource_pool + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = gac_operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=gca_operation.DeleteOperationMetadata, + ) + + # Done; return the response. + return response + + def query_deployed_models( + self, + request: Union[ + deployment_resource_pool_service.QueryDeployedModelsRequest, dict + ] = None, + *, + deployment_resource_pool: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.QueryDeployedModelsPager: + r"""List DeployedModels that have been deployed on this + DeploymentResourcePool. + + .. code-block:: python + + from google.cloud import aiplatform_v1beta1 + + def sample_query_deployed_models(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.QueryDeployedModelsRequest( + deployment_resource_pool="deployment_resource_pool_value", + ) + + # Make the request + page_result = client.query_deployed_models(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.aiplatform_v1beta1.types.QueryDeployedModelsRequest, dict]): + The request object. Request message for + QueryDeployedModels method. + deployment_resource_pool (str): + Required. The name of the target DeploymentResourcePool + to query. Format: + projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool} + + This corresponds to the ``deployment_resource_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service.pagers.QueryDeployedModelsPager: + Response message for + QueryDeployedModels method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([deployment_resource_pool]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a deployment_resource_pool_service.QueryDeployedModelsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, deployment_resource_pool_service.QueryDeployedModelsRequest + ): + request = deployment_resource_pool_service.QueryDeployedModelsRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if deployment_resource_pool is not None: + request.deployment_resource_pool = deployment_resource_pool + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.query_deployed_models] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deployment_resource_pool", request.deployment_resource_pool),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.QueryDeployedModelsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: operations_pb2.ListOperationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: operations_pb2.GetOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: operations_pb2.DeleteOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: operations_pb2.CancelOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def wait_operation( + self, + request: operations_pb2.WaitOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.wait_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_location( + self, + request: locations_pb2.GetLocationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: locations_pb2.ListLocationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-aiplatform", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DeploymentResourcePoolServiceClient",) diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/pagers.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/pagers.py new file mode 100644 index 0000000000..0fa12cbb0e --- /dev/null +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/pagers.py @@ -0,0 +1,316 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Sequence, + Tuple, + Optional, + Iterator, +) + +from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool +from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.aiplatform_v1beta1.types import endpoint + + +class ListDeploymentResourcePoolsPager: + """A pager for iterating through ``list_deployment_resource_pools`` requests. + + This class thinly wraps an initial + :class:`google.cloud.aiplatform_v1beta1.types.ListDeploymentResourcePoolsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deployment_resource_pools`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeploymentResourcePools`` requests and continue to iterate + through the ``deployment_resource_pools`` field on the + corresponding responses. + + All the usual :class:`google.cloud.aiplatform_v1beta1.types.ListDeploymentResourcePoolsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., deployment_resource_pool_service.ListDeploymentResourcePoolsResponse + ], + request: deployment_resource_pool_service.ListDeploymentResourcePoolsRequest, + response: deployment_resource_pool_service.ListDeploymentResourcePoolsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.aiplatform_v1beta1.types.ListDeploymentResourcePoolsRequest): + The initial request object. + response (google.cloud.aiplatform_v1beta1.types.ListDeploymentResourcePoolsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = ( + deployment_resource_pool_service.ListDeploymentResourcePoolsRequest(request) + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[deployment_resource_pool_service.ListDeploymentResourcePoolsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[deployment_resource_pool.DeploymentResourcePool]: + for page in self.pages: + yield from page.deployment_resource_pools + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeploymentResourcePoolsAsyncPager: + """A pager for iterating through ``list_deployment_resource_pools`` requests. + + This class thinly wraps an initial + :class:`google.cloud.aiplatform_v1beta1.types.ListDeploymentResourcePoolsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deployment_resource_pools`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeploymentResourcePools`` requests and continue to iterate + through the ``deployment_resource_pools`` field on the + corresponding responses. + + All the usual :class:`google.cloud.aiplatform_v1beta1.types.ListDeploymentResourcePoolsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., + Awaitable[ + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse + ], + ], + request: deployment_resource_pool_service.ListDeploymentResourcePoolsRequest, + response: deployment_resource_pool_service.ListDeploymentResourcePoolsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.aiplatform_v1beta1.types.ListDeploymentResourcePoolsRequest): + The initial request object. + response (google.cloud.aiplatform_v1beta1.types.ListDeploymentResourcePoolsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = ( + deployment_resource_pool_service.ListDeploymentResourcePoolsRequest(request) + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[ + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse + ]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterator[deployment_resource_pool.DeploymentResourcePool]: + async def async_generator(): + async for page in self.pages: + for response in page.deployment_resource_pools: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class QueryDeployedModelsPager: + """A pager for iterating through ``query_deployed_models`` requests. + + This class thinly wraps an initial + :class:`google.cloud.aiplatform_v1beta1.types.QueryDeployedModelsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deployed_models`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``QueryDeployedModels`` requests and continue to iterate + through the ``deployed_models`` field on the + corresponding responses. + + All the usual :class:`google.cloud.aiplatform_v1beta1.types.QueryDeployedModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., deployment_resource_pool_service.QueryDeployedModelsResponse + ], + request: deployment_resource_pool_service.QueryDeployedModelsRequest, + response: deployment_resource_pool_service.QueryDeployedModelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.aiplatform_v1beta1.types.QueryDeployedModelsRequest): + The initial request object. + response (google.cloud.aiplatform_v1beta1.types.QueryDeployedModelsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = deployment_resource_pool_service.QueryDeployedModelsRequest( + request + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[deployment_resource_pool_service.QueryDeployedModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[endpoint.DeployedModel]: + for page in self.pages: + yield from page.deployed_models + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class QueryDeployedModelsAsyncPager: + """A pager for iterating through ``query_deployed_models`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.aiplatform_v1beta1.types.QueryDeployedModelsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deployed_models`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``QueryDeployedModels`` requests and continue to iterate + through the ``deployed_models`` field on the + corresponding responses. + + All the usual :class:`google.cloud.aiplatform_v1beta1.types.QueryDeployedModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[deployment_resource_pool_service.QueryDeployedModelsResponse] + ], + request: deployment_resource_pool_service.QueryDeployedModelsRequest, + response: deployment_resource_pool_service.QueryDeployedModelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.aiplatform_v1beta1.types.QueryDeployedModelsRequest): + The initial request object. + response (google.cloud.aiplatform_v1beta1.types.QueryDeployedModelsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = deployment_resource_pool_service.QueryDeployedModelsRequest( + request + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[deployment_resource_pool_service.QueryDeployedModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[endpoint.DeployedModel]: + async def async_generator(): + async for page in self.pages: + for response in page.deployed_models: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/__init__.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/__init__.py new file mode 100644 index 0000000000..9c44a4a735 --- /dev/null +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DeploymentResourcePoolServiceTransport +from .grpc import DeploymentResourcePoolServiceGrpcTransport +from .grpc_asyncio import DeploymentResourcePoolServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[DeploymentResourcePoolServiceTransport]] +_transport_registry["grpc"] = DeploymentResourcePoolServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DeploymentResourcePoolServiceGrpcAsyncIOTransport + +__all__ = ( + "DeploymentResourcePoolServiceTransport", + "DeploymentResourcePoolServiceGrpcTransport", + "DeploymentResourcePoolServiceGrpcAsyncIOTransport", +) diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/base.py new file mode 100644 index 0000000000..c373c179a6 --- /dev/null +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/base.py @@ -0,0 +1,332 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import pkg_resources + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool +from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-aiplatform", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class DeploymentResourcePoolServiceTransport(abc.ABC): + """Abstract transport class for DeploymentResourcePoolService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "aiplatform.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: 
Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_deployment_resource_pool: gapic_v1.method.wrap_method( + self.create_deployment_resource_pool, + default_timeout=None, + client_info=client_info, + ), + self.get_deployment_resource_pool: gapic_v1.method.wrap_method( + self.get_deployment_resource_pool, + default_timeout=None, + client_info=client_info, + ), + self.list_deployment_resource_pools: gapic_v1.method.wrap_method( + self.list_deployment_resource_pools, + default_timeout=None, + client_info=client_info, + ), + self.delete_deployment_resource_pool: gapic_v1.method.wrap_method( + self.delete_deployment_resource_pool, + default_timeout=None, + client_info=client_info, + ), + self.query_deployed_models: gapic_v1.method.wrap_method( + self.query_deployed_models, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_deployment_resource_pool( + self, + ) -> Callable[ + [deployment_resource_pool_service.CreateDeploymentResourcePoolRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_deployment_resource_pool( + self, + ) -> Callable[ + [deployment_resource_pool_service.GetDeploymentResourcePoolRequest], + Union[ + deployment_resource_pool.DeploymentResourcePool, + Awaitable[deployment_resource_pool.DeploymentResourcePool], + ], + ]: + raise NotImplementedError() + + @property + def list_deployment_resource_pools( + self, + ) -> Callable[ + [deployment_resource_pool_service.ListDeploymentResourcePoolsRequest], + Union[ + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse, + Awaitable[ + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse + ], + ], + ]: + raise NotImplementedError() + + @property + def delete_deployment_resource_pool( + self, + ) -> Callable[ + [deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def query_deployed_models( + self, + ) -> Callable[ + [deployment_resource_pool_service.QueryDeployedModelsRequest], + Union[ + deployment_resource_pool_service.QueryDeployedModelsResponse, + Awaitable[deployment_resource_pool_service.QueryDeployedModelsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def wait_operation( + self, + ) -> Callable[ + [operations_pb2.WaitOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, 
Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("DeploymentResourcePoolServiceTransport",) diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/grpc.py new file mode 100644 index 0000000000..0d2a6d290b --- /dev/null +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/grpc.py @@ -0,0 +1,621 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool +from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from .base import DeploymentResourcePoolServiceTransport, DEFAULT_CLIENT_INFO + + +class DeploymentResourcePoolServiceGrpcTransport( + DeploymentResourcePoolServiceTransport +): + """gRPC backend transport for DeploymentResourcePoolService. + + A service that manages the DeploymentResourcePool resource. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "aiplatform.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "aiplatform.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. 
+ + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def create_deployment_resource_pool( + self, + ) -> Callable[ + [deployment_resource_pool_service.CreateDeploymentResourcePoolRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the create deployment resource + pool method over gRPC. + + Create a DeploymentResourcePool. + + Returns: + Callable[[~.CreateDeploymentResourcePoolRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deployment_resource_pool" not in self._stubs: + self._stubs[ + "create_deployment_resource_pool" + ] = self.grpc_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/CreateDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.CreateDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_deployment_resource_pool"] + + @property + def get_deployment_resource_pool( + self, + ) -> Callable[ + [deployment_resource_pool_service.GetDeploymentResourcePoolRequest], + deployment_resource_pool.DeploymentResourcePool, + ]: + r"""Return a callable for the get deployment resource pool method over gRPC. + + Get a DeploymentResourcePool. + + Returns: + Callable[[~.GetDeploymentResourcePoolRequest], + ~.DeploymentResourcePool]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
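+        # Descriptive note on the pattern every RPC property in this transport
+        # follows: stubs are created lazily and cached in ``self._stubs`` keyed
+        # by method name, so repeated attribute access reuses one
+        # ``unary_unary`` multicallable per RPC, roughly:
+        #
+        #     stub = self._stubs.get("get_deployment_resource_pool")
+        #     if stub is None:
+        #         stub = self.grpc_channel.unary_unary(...)   # created once
+        #         self._stubs["get_deployment_resource_pool"] = stub
+        #     return stub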
+ if "get_deployment_resource_pool" not in self._stubs: + self._stubs["get_deployment_resource_pool"] = self.grpc_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/GetDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.GetDeploymentResourcePoolRequest.serialize, + response_deserializer=deployment_resource_pool.DeploymentResourcePool.deserialize, + ) + return self._stubs["get_deployment_resource_pool"] + + @property + def list_deployment_resource_pools( + self, + ) -> Callable[ + [deployment_resource_pool_service.ListDeploymentResourcePoolsRequest], + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse, + ]: + r"""Return a callable for the list deployment resource pools method over gRPC. + + List DeploymentResourcePools in a location. + + Returns: + Callable[[~.ListDeploymentResourcePoolsRequest], + ~.ListDeploymentResourcePoolsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deployment_resource_pools" not in self._stubs: + self._stubs[ + "list_deployment_resource_pools" + ] = self.grpc_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/ListDeploymentResourcePools", + request_serializer=deployment_resource_pool_service.ListDeploymentResourcePoolsRequest.serialize, + response_deserializer=deployment_resource_pool_service.ListDeploymentResourcePoolsResponse.deserialize, + ) + return self._stubs["list_deployment_resource_pools"] + + @property + def delete_deployment_resource_pool( + self, + ) -> Callable[ + [deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete deployment resource + pool method over gRPC. + + Delete a DeploymentResourcePool. + + Returns: + Callable[[~.DeleteDeploymentResourcePoolRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_deployment_resource_pool" not in self._stubs: + self._stubs[ + "delete_deployment_resource_pool" + ] = self.grpc_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/DeleteDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_deployment_resource_pool"] + + @property + def query_deployed_models( + self, + ) -> Callable[ + [deployment_resource_pool_service.QueryDeployedModelsRequest], + deployment_resource_pool_service.QueryDeployedModelsResponse, + ]: + r"""Return a callable for the query deployed models method over gRPC. + + List DeployedModels that have been deployed on this + DeploymentResourcePool. + + Returns: + Callable[[~.QueryDeployedModelsRequest], + ~.QueryDeployedModelsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "query_deployed_models" not in self._stubs:
+            self._stubs["query_deployed_models"] = self.grpc_channel.unary_unary(
+                "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/QueryDeployedModels",
+                request_serializer=deployment_resource_pool_service.QueryDeployedModelsRequest.serialize,
+                response_deserializer=deployment_resource_pool_service.QueryDeployedModelsResponse.deserialize,
+            )
+        return self._stubs["query_deployed_models"]
+
+    def close(self):
+        self.grpc_channel.close()
+
+    @property
+    def delete_operation(
+        self,
+    ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
+        r"""Return a callable for the delete_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_operation" not in self._stubs:
+            self._stubs["delete_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/DeleteOperation",
+                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["delete_operation"]
+
+    @property
+    def cancel_operation(
+        self,
+    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
+        r"""Return a callable for the cancel_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "cancel_operation" not in self._stubs:
+            self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/CancelOperation",
+                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["cancel_operation"]
+
+    @property
+    def wait_operation(
+        self,
+    ) -> Callable[[operations_pb2.WaitOperationRequest], None]:
+        r"""Return a callable for the wait_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "wait_operation" not in self._stubs:
+            self._stubs["wait_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/WaitOperation",
+                request_serializer=operations_pb2.WaitOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["wait_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
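+        # Illustrative usage sketch (the operation name is an assumption):
+        # callers can poll a long-running operation directly through this stub,
+        # e.g.
+        #
+        #     op = transport.get_operation(
+        #         operations_pb2.GetOperationRequest(name=operation_name)
+        #     )
+        #     if op.done:
+        #         ...  # inspect op.response or op.error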
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("DeploymentResourcePoolServiceGrpcTransport",) diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/grpc_asyncio.py new file mode 100644 index 0000000000..f4b919e5a6 --- /dev/null +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/grpc_asyncio.py @@ -0,0 +1,622 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool +from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from .base import DeploymentResourcePoolServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import DeploymentResourcePoolServiceGrpcTransport + + +class DeploymentResourcePoolServiceGrpcAsyncIOTransport( + DeploymentResourcePoolServiceTransport +): + """gRPC AsyncIO backend transport for DeploymentResourcePoolService. + + A service that manages the DeploymentResourcePool resource. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "aiplatform.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "aiplatform.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_deployment_resource_pool( + self, + ) -> Callable[ + [deployment_resource_pool_service.CreateDeploymentResourcePoolRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create deployment resource + pool method over gRPC. + + Create a DeploymentResourcePool. + + Returns: + Callable[[~.CreateDeploymentResourcePoolRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deployment_resource_pool" not in self._stubs: + self._stubs[ + "create_deployment_resource_pool" + ] = self.grpc_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/CreateDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.CreateDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_deployment_resource_pool"] + + @property + def get_deployment_resource_pool( + self, + ) -> Callable[ + [deployment_resource_pool_service.GetDeploymentResourcePoolRequest], + Awaitable[deployment_resource_pool.DeploymentResourcePool], + ]: + r"""Return a callable for the get deployment resource pool method over gRPC. + + Get a DeploymentResourcePool. + + Returns: + Callable[[~.GetDeploymentResourcePoolRequest], + Awaitable[~.DeploymentResourcePool]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deployment_resource_pool" not in self._stubs: + self._stubs["get_deployment_resource_pool"] = self.grpc_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/GetDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.GetDeploymentResourcePoolRequest.serialize, + response_deserializer=deployment_resource_pool.DeploymentResourcePool.deserialize, + ) + return self._stubs["get_deployment_resource_pool"] + + @property + def list_deployment_resource_pools( + self, + ) -> Callable[ + [deployment_resource_pool_service.ListDeploymentResourcePoolsRequest], + Awaitable[deployment_resource_pool_service.ListDeploymentResourcePoolsResponse], + ]: + r"""Return a callable for the list deployment resource pools method over gRPC. + + List DeploymentResourcePools in a location. + + Returns: + Callable[[~.ListDeploymentResourcePoolsRequest], + Awaitable[~.ListDeploymentResourcePoolsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deployment_resource_pools" not in self._stubs: + self._stubs[ + "list_deployment_resource_pools" + ] = self.grpc_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/ListDeploymentResourcePools", + request_serializer=deployment_resource_pool_service.ListDeploymentResourcePoolsRequest.serialize, + response_deserializer=deployment_resource_pool_service.ListDeploymentResourcePoolsResponse.deserialize, + ) + return self._stubs["list_deployment_resource_pools"] + + @property + def delete_deployment_resource_pool( + self, + ) -> Callable[ + [deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete deployment resource + pool method over gRPC. + + Delete a DeploymentResourcePool. + + Returns: + Callable[[~.DeleteDeploymentResourcePoolRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_deployment_resource_pool" not in self._stubs: + self._stubs[ + "delete_deployment_resource_pool" + ] = self.grpc_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/DeleteDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_deployment_resource_pool"] + + @property + def query_deployed_models( + self, + ) -> Callable[ + [deployment_resource_pool_service.QueryDeployedModelsRequest], + Awaitable[deployment_resource_pool_service.QueryDeployedModelsResponse], + ]: + r"""Return a callable for the query deployed models method over gRPC. + + List DeployedModels that have been deployed on this + DeploymentResourcePool. + + Returns: + Callable[[~.QueryDeployedModelsRequest], + Awaitable[~.QueryDeployedModelsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "query_deployed_models" not in self._stubs: + self._stubs["query_deployed_models"] = self.grpc_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/QueryDeployedModels", + request_serializer=deployment_resource_pool_service.QueryDeployedModelsRequest.serialize, + response_deserializer=deployment_resource_pool_service.QueryDeployedModelsResponse.deserialize, + ) + return self._stubs["query_deployed_models"] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def wait_operation( + self, + ) -> Callable[[operations_pb2.WaitOperationRequest], None]: + r"""Return a callable for the wait_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "wait_operation" not in self._stubs:
+            self._stubs["wait_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/WaitOperation",
+                request_serializer=operations_pb2.WaitOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["wait_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+    ]:
+        r"""Return a callable for the list_operations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+ Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
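+        # Illustrative usage sketch (the permission string and resource name
+        # are assumptions): in the AsyncIO transport these stubs return
+        # awaitables, e.g.
+        #
+        #     response = await transport.test_iam_permissions(
+        #         iam_policy_pb2.TestIamPermissionsRequest(
+        #             resource=pool_name,
+        #             permissions=["aiplatform.deploymentResourcePools.get"],
+        #         )
+        #     )
+        #     granted = set(response.permissions)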
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("DeploymentResourcePoolServiceGrpcAsyncIOTransport",) diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py index 7bf281970f..ee3f45dd5f 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py @@ -59,6 +59,12 @@ class EndpointServiceAsyncClient: DEFAULT_ENDPOINT = EndpointServiceClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = EndpointServiceClient.DEFAULT_MTLS_ENDPOINT + deployment_resource_pool_path = staticmethod( + EndpointServiceClient.deployment_resource_pool_path + ) + parse_deployment_resource_pool_path = staticmethod( + EndpointServiceClient.parse_deployment_resource_pool_path + ) endpoint_path = staticmethod(EndpointServiceClient.endpoint_path) parse_endpoint_path = staticmethod(EndpointServiceClient.parse_endpoint_path) model_path = staticmethod(EndpointServiceClient.model_path) @@ -748,9 +754,6 @@ async def sample_delete_endpoint(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py index d7f1a3bdd1..2c98fa2cb6 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py @@ -175,6 +175,28 @@ def transport(self) -> EndpointServiceTransport: """ return self._transport + @staticmethod + def deployment_resource_pool_path( + project: str, + location: str, + deployment_resource_pool: str, + ) -> str: + """Returns a fully-qualified deployment_resource_pool string.""" + return "projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool}".format( + project=project, + location=location, + deployment_resource_pool=deployment_resource_pool, + ) + + @staticmethod + def parse_deployment_resource_pool_path(path: str) -> Dict[str, str]: + """Parses a deployment_resource_pool path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/deploymentResourcePools/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def endpoint_path( project: str, @@ -1026,9 +1048,6 @@ def sample_delete_endpoint(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py index 54aa120a85..d3ba7e2259 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py @@ -794,9 +794,6 @@ async def sample_delete_featurestore(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1404,9 +1401,6 @@ async def sample_delete_entity_type(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2133,9 +2127,6 @@ async def sample_delete_feature(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_service/client.py b/google/cloud/aiplatform_v1beta1/services/featurestore_service/client.py index fad8c47515..bf4b73ab8e 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_service/client.py @@ -1060,9 +1060,6 @@ def sample_delete_featurestore(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1670,9 +1667,6 @@ def sample_delete_entity_type(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2399,9 +2393,6 @@ def sample_delete_feature(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/async_client.py index 36e86f2ee1..808f28c7d9 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/async_client.py @@ -727,9 +727,6 @@ async def sample_delete_index_endpoint(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/client.py b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/client.py index 9e63d592b4..03420f3c41 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/client.py @@ -969,9 +969,6 @@ def sample_delete_index_endpoint(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/index_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/index_service/async_client.py index 4c58c9e354..e546811cb9 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/index_service/async_client.py @@ -742,9 +742,6 @@ async def sample_delete_index(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/index_service/client.py b/google/cloud/aiplatform_v1beta1/services/index_service/client.py index 011c92c426..3dbc1521b9 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/index_service/client.py @@ -987,9 +987,6 @@ def sample_delete_index(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py index 49d8400fd1..b2331938f0 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py @@ -663,9 +663,6 @@ async def sample_delete_custom_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1198,9 +1195,6 @@ async def sample_delete_data_labeling_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1730,9 +1724,6 @@ async def sample_delete_hyperparameter_tuning_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2277,9 +2268,6 @@ async def sample_delete_batch_prediction_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -3114,9 +3102,6 @@ async def sample_delete_model_deployment_monitoring_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/client.py b/google/cloud/aiplatform_v1beta1/services/job_service/client.py index ec983aa325..80e47f4f05 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/client.py @@ -1081,9 +1081,6 @@ def sample_delete_custom_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1616,9 +1613,6 @@ def sample_delete_data_labeling_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have @@ -2154,9 +2148,6 @@ def sample_delete_hyperparameter_tuning_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2709,9 +2700,6 @@ def sample_delete_batch_prediction_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -3574,9 +3562,6 @@ def sample_delete_model_deployment_monitoring_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/metadata_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/metadata_service/async_client.py index e72901e551..76753ab511 100644 --- a/google/cloud/aiplatform_v1beta1/services/metadata_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/metadata_service/async_client.py @@ -651,9 +651,6 @@ async def sample_delete_metadata_store(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1205,9 +1202,6 @@ async def sample_delete_artifact(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1871,9 +1865,6 @@ async def sample_delete_context(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2882,9 +2873,6 @@ async def sample_delete_execution(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/metadata_service/client.py b/google/cloud/aiplatform_v1beta1/services/metadata_service/client.py index be2a6710ef..2099ed6f28 100644 --- a/google/cloud/aiplatform_v1beta1/services/metadata_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/metadata_service/client.py @@ -960,9 +960,6 @@ def sample_delete_metadata_store(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1514,9 +1511,6 @@ def sample_delete_artifact(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2180,9 +2174,6 @@ def sample_delete_context(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -3197,9 +3188,6 @@ def sample_delete_execution(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py index 78a14a8386..f12418a9a5 100644 --- a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py @@ -218,40 +218,40 @@ def parse_dataset_path(path: str) -> Dict[str, str]: @staticmethod def dataset_path( project: str, + location: str, dataset: str, ) -> str: """Returns a fully-qualified dataset string.""" - return "projects/{project}/datasets/{dataset}".format( + return "projects/{project}/locations/{location}/datasets/{dataset}".format( project=project, + location=location, dataset=dataset, ) @staticmethod def parse_dataset_path(path: str) -> Dict[str, str]: """Parses a dataset path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path) + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod def dataset_path( project: str, - location: str, dataset: str, ) -> str: """Returns a fully-qualified dataset string.""" - return "projects/{project}/locations/{location}/datasets/{dataset}".format( + return "projects/{project}/datasets/{dataset}".format( project=project, - location=location, dataset=dataset, ) @staticmethod def parse_dataset_path(path: str) -> Dict[str, str]: """Parses a dataset path into its component segments.""" - m = re.match( - r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$", - path, - ) + m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path) return m.groupdict() if m else {} @staticmethod diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py index 882cec2da7..b106c8802c 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py @@ -1000,9 +1000,6 @@ async def sample_delete_model(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1129,9 +1126,6 @@ async def sample_delete_model_version(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/client.py b/google/cloud/aiplatform_v1beta1/services/model_service/client.py index 74717a08b4..8484e4e0fc 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/model_service/client.py @@ -1307,9 +1307,6 @@ def sample_delete_model(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1436,9 +1433,6 @@ def sample_delete_model_version(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py index c56a856faf..e5f215925a 100644 --- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py @@ -639,9 +639,6 @@ async def sample_delete_training_pipeline(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1179,9 +1176,6 @@ async def sample_delete_pipeline_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py index 7aac90a5e4..6169f953f7 100644 --- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py @@ -1023,9 +1023,6 @@ def sample_delete_training_pipeline(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1563,9 +1560,6 @@ def sample_delete_pipeline_job(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py index bc0e75d038..67134cf9cd 100644 --- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py @@ -644,9 +644,6 @@ async def sample_delete_specialist_pool(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py index 5772bea371..1fe1f7ac36 100644 --- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py @@ -862,9 +862,6 @@ def sample_delete_specialist_pool(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/async_client.py index ebfebeb5af..4f5691a77f 100644 --- a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/async_client.py @@ -786,9 +786,6 @@ async def sample_delete_tensorboard(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have @@ -1363,9 +1360,6 @@ async def sample_delete_tensorboard_experiment(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2056,9 +2050,6 @@ async def sample_delete_tensorboard_run(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2750,9 +2741,6 @@ async def sample_delete_tensorboard_time_series(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/client.py b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/client.py index a9ce773344..2a408b4251 100644 --- a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/client.py @@ -1062,9 +1062,6 @@ def sample_delete_tensorboard(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -1653,9 +1650,6 @@ def sample_delete_tensorboard_experiment(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2354,9 +2348,6 @@ def sample_delete_tensorboard_run(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -3068,9 +3059,6 @@ def sample_delete_tensorboard_time_series(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/aiplatform_v1beta1/types/__init__.py b/google/cloud/aiplatform_v1beta1/types/__init__.py index 40a3045cfc..fc4ad4db89 100644 --- a/google/cloud/aiplatform_v1beta1/types/__init__.py +++ b/google/cloud/aiplatform_v1beta1/types/__init__.py @@ -81,6 +81,20 @@ from .deployed_model_ref import ( DeployedModelRef, ) +from .deployment_resource_pool import ( + DeploymentResourcePool, +) +from .deployment_resource_pool_service import ( + CreateDeploymentResourcePoolOperationMetadata, + CreateDeploymentResourcePoolRequest, + DeleteDeploymentResourcePoolRequest, + GetDeploymentResourcePoolRequest, + ListDeploymentResourcePoolsRequest, + ListDeploymentResourcePoolsResponse, + QueryDeployedModelsRequest, + QueryDeployedModelsResponse, + UpdateDeploymentResourcePoolOperationMetadata, +) from .encryption_spec import ( EncryptionSpec, ) @@ -642,6 +656,16 @@ "UpdateDatasetRequest", "DeployedIndexRef", "DeployedModelRef", + "DeploymentResourcePool", + "CreateDeploymentResourcePoolOperationMetadata", + "CreateDeploymentResourcePoolRequest", + "DeleteDeploymentResourcePoolRequest", + "GetDeploymentResourcePoolRequest", + "ListDeploymentResourcePoolsRequest", + "ListDeploymentResourcePoolsResponse", + "QueryDeployedModelsRequest", + "QueryDeployedModelsResponse", + "UpdateDeploymentResourcePoolOperationMetadata", "EncryptionSpec", "DeployedModel", "Endpoint", diff --git a/google/cloud/aiplatform_v1beta1/types/custom_job.py b/google/cloud/aiplatform_v1beta1/types/custom_job.py index d04ddb4856..4cbb27b616 100644 --- a/google/cloud/aiplatform_v1beta1/types/custom_job.py +++ b/google/cloud/aiplatform_v1beta1/types/custom_job.py @@ -165,7 +165,7 @@ class CustomJob(proto.Message): class CustomJobSpec(proto.Message): r"""Represents the spec of a CustomJob. - Next Id: 14 + Next Id: 15 Attributes: worker_pool_specs (Sequence[google.cloud.aiplatform_v1beta1.types.WorkerPoolSpec]): @@ -183,7 +183,7 @@ class CustomJobSpec(proto.Message): Agent `__ for the CustomJob's project is used. network (str): - The full name of the Compute Engine + Optional. The full name of the Compute Engine `network `__ to which the Job should be peered. For example, ``projects/12345/global/networks/myVPC``. diff --git a/google/cloud/aiplatform_v1beta1/types/dataset_service.py b/google/cloud/aiplatform_v1beta1/types/dataset_service.py index f6c91efd4f..9722778d6a 100644 --- a/google/cloud/aiplatform_v1beta1/types/dataset_service.py +++ b/google/cloud/aiplatform_v1beta1/types/dataset_service.py @@ -499,7 +499,7 @@ class ListSavedQueriesResponse(proto.Message): Attributes: saved_queries (Sequence[google.cloud.aiplatform_v1beta1.types.SavedQuery]): - A list of SavedQueries that match the + A list of SavedQueries that matches the specified filter in the request. next_page_token (str): The standard List next-page token. diff --git a/google/cloud/aiplatform_v1beta1/types/deployment_resource_pool.py b/google/cloud/aiplatform_v1beta1/types/deployment_resource_pool.py new file mode 100644 index 0000000000..da677e6afc --- /dev/null +++ b/google/cloud/aiplatform_v1beta1/types/deployment_resource_pool.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.aiplatform_v1beta1.types import machine_resources +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.aiplatform.v1beta1", + manifest={ + "DeploymentResourcePool", + }, +) + + +class DeploymentResourcePool(proto.Message): + r"""A description of resources that can be shared by multiple + DeployedModels, whose underlying specification consists of a + DedicatedResources. + + Attributes: + name (str): + Output only. The resource name of the + DeploymentResourcePool. Format: + projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool} + dedicated_resources (google.cloud.aiplatform_v1beta1.types.DedicatedResources): + Required. The underlying DedicatedResources + that the DeploymentResourcePool uses. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when this + DeploymentResourcePool was created. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + dedicated_resources = proto.Field( + proto.MESSAGE, + number=2, + message=machine_resources.DedicatedResources, + ) + create_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform_v1beta1/types/deployment_resource_pool_service.py b/google/cloud/aiplatform_v1beta1/types/deployment_resource_pool_service.py new file mode 100644 index 0000000000..e80cd7a6f5 --- /dev/null +++ b/google/cloud/aiplatform_v1beta1/types/deployment_resource_pool_service.py @@ -0,0 +1,276 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.cloud.aiplatform_v1beta1.types import deployed_model_ref +from google.cloud.aiplatform_v1beta1.types import ( + deployment_resource_pool as gca_deployment_resource_pool, +) +from google.cloud.aiplatform_v1beta1.types import endpoint +from google.cloud.aiplatform_v1beta1.types import operation + + +__protobuf__ = proto.module( + package="google.cloud.aiplatform.v1beta1", + manifest={ + "CreateDeploymentResourcePoolRequest", + "CreateDeploymentResourcePoolOperationMetadata", + "GetDeploymentResourcePoolRequest", + "ListDeploymentResourcePoolsRequest", + "ListDeploymentResourcePoolsResponse", + "UpdateDeploymentResourcePoolOperationMetadata", + "DeleteDeploymentResourcePoolRequest", + "QueryDeployedModelsRequest", + "QueryDeployedModelsResponse", + }, +) + + +class CreateDeploymentResourcePoolRequest(proto.Message): + r"""Request message for CreateDeploymentResourcePool method. + + Attributes: + parent (str): + Required. The parent location resource where + this DeploymentResourcePool will be created. + Format: projects/{project}/locations/{location} + deployment_resource_pool (google.cloud.aiplatform_v1beta1.types.DeploymentResourcePool): + Required. The DeploymentResourcePool to + create. + deployment_resource_pool_id (str): + Required. The ID to use for the DeploymentResourcePool, + which will become the final component of the + DeploymentResourcePool's resource name. + + The maximum length is 63 characters, and valid characters + are ``/^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$/``. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + deployment_resource_pool = proto.Field( + proto.MESSAGE, + number=2, + message=gca_deployment_resource_pool.DeploymentResourcePool, + ) + deployment_resource_pool_id = proto.Field( + proto.STRING, + number=3, + ) + + +class CreateDeploymentResourcePoolOperationMetadata(proto.Message): + r"""Runtime operation information for + CreateDeploymentResourcePool method. + + Attributes: + generic_metadata (google.cloud.aiplatform_v1beta1.types.GenericOperationMetadata): + The operation generic information. + """ + + generic_metadata = proto.Field( + proto.MESSAGE, + number=1, + message=operation.GenericOperationMetadata, + ) + + +class GetDeploymentResourcePoolRequest(proto.Message): + r"""Request message for GetDeploymentResourcePool method. + + Attributes: + name (str): + Required. The name of the DeploymentResourcePool to + retrieve. Format: + projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool} + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDeploymentResourcePoolsRequest(proto.Message): + r"""Request message for ListDeploymentResourcePools method. + + Attributes: + parent (str): + Required. The parent Location which owns this + collection of DeploymentResourcePools. Format: + projects/{project}/locations/{location} + page_size (int): + The maximum number of DeploymentResourcePools + to return. The service may return fewer than + this value. + page_token (str): + A page token, received from a previous + ``ListDeploymentResourcePools`` call. Provide this to + retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListDeploymentResourcePools`` must match the call that + provided the page token. 
+ """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDeploymentResourcePoolsResponse(proto.Message): + r"""Response message for ListDeploymentResourcePools method. + + Attributes: + deployment_resource_pools (Sequence[google.cloud.aiplatform_v1beta1.types.DeploymentResourcePool]): + The DeploymentResourcePools from the + specified location. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + deployment_resource_pools = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_deployment_resource_pool.DeploymentResourcePool, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateDeploymentResourcePoolOperationMetadata(proto.Message): + r"""Runtime operation information for + UpdateDeploymentResourcePool method. + + Attributes: + generic_metadata (google.cloud.aiplatform_v1beta1.types.GenericOperationMetadata): + The operation generic information. + """ + + generic_metadata = proto.Field( + proto.MESSAGE, + number=1, + message=operation.GenericOperationMetadata, + ) + + +class DeleteDeploymentResourcePoolRequest(proto.Message): + r"""Request message for DeleteDeploymentResourcePool method. + + Attributes: + name (str): + Required. The name of the DeploymentResourcePool to delete. + Format: + projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool} + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class QueryDeployedModelsRequest(proto.Message): + r"""Request message for QueryDeployedModels method. + + Attributes: + deployment_resource_pool (str): + Required. The name of the target DeploymentResourcePool to + query. Format: + projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool} + page_size (int): + The maximum number of DeployedModels to + return. The service may return fewer than this + value. + page_token (str): + A page token, received from a previous + ``QueryDeployedModels`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``QueryDeployedModels`` must match the call that provided + the page token. + """ + + deployment_resource_pool = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + + +class QueryDeployedModelsResponse(proto.Message): + r"""Response message for QueryDeployedModels method. + + Attributes: + deployed_models (Sequence[google.cloud.aiplatform_v1beta1.types.DeployedModel]): + DEPRECATED Use deployed_model_refs instead. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + deployed_model_refs (Sequence[google.cloud.aiplatform_v1beta1.types.DeployedModelRef]): + References to the DeployedModels that share + the specified deploymentResourcePool. 
+ """ + + @property + def raw_page(self): + return self + + deployed_models = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=endpoint.DeployedModel, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + deployed_model_refs = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=deployed_model_ref.DeployedModelRef, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform_v1beta1/types/endpoint.py b/google/cloud/aiplatform_v1beta1/types/endpoint.py index fc4fabb8e0..034d8e97fa 100644 --- a/google/cloud/aiplatform_v1beta1/types/endpoint.py +++ b/google/cloud/aiplatform_v1beta1/types/endpoint.py @@ -215,6 +215,12 @@ class DeployedModel(proto.Message): degree are decided by Vertex AI, and require only a modest additional configuration. + This field is a member of `oneof`_ ``prediction_resources``. + shared_resources (str): + The resource name of the shared DeploymentResourcePool to + deploy on. Format: + projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool} + This field is a member of `oneof`_ ``prediction_resources``. id (str): Immutable. The ID of the DeployedModel. If not provided upon @@ -305,6 +311,11 @@ class DeployedModel(proto.Message): oneof="prediction_resources", message=machine_resources.AutomaticResources, ) + shared_resources = proto.Field( + proto.STRING, + number=17, + oneof="prediction_resources", + ) id = proto.Field( proto.STRING, number=1, diff --git a/google/cloud/aiplatform_v1beta1/types/job_service.py b/google/cloud/aiplatform_v1beta1/types/job_service.py index 73e25db33d..fd099b9ac5 100644 --- a/google/cloud/aiplatform_v1beta1/types/job_service.py +++ b/google/cloud/aiplatform_v1beta1/types/job_service.py @@ -129,19 +129,19 @@ class ListCustomJobsRequest(proto.Message): Supported fields: - - ``display_name`` supports = and !=. - - - ``state`` supports = and !=. + - ``display_name`` supports ``=``, ``!=`` comparisons, and + ``:`` wildcard. + - ``state`` supports ``=``, ``!=`` comparisons. + - ``create_time`` supports ``=``, ``!=``,\ ``<``, + ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must + be in RFC 3339 format. Some examples of using the filter are: - - ``state="JOB_STATE_SUCCEEDED" AND display_name="my_job"`` - - - ``state="JOB_STATE_RUNNING" OR display_name="my_job"`` - + - ``state="JOB_STATE_SUCCEEDED" AND display_name:"my_job_*"`` + - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - - - ``state="JOB_STATE_FAILED"`` + - ``create_time>"2021-05-18T00:00:00Z"`` page_size (int): The standard list page size. page_token (str): @@ -290,19 +290,19 @@ class ListDataLabelingJobsRequest(proto.Message): Supported fields: - - ``display_name`` supports = and !=. - - - ``state`` supports = and !=. + - ``display_name`` supports ``=``, ``!=`` comparisons, and + ``:`` wildcard. + - ``state`` supports ``=``, ``!=`` comparisons. + - ``create_time`` supports ``=``, ``!=``,\ ``<``, + ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must + be in RFC 3339 format. Some examples of using the filter are: - - ``state="JOB_STATE_SUCCEEDED" AND display_name="my_job"`` - - - ``state="JOB_STATE_RUNNING" OR display_name="my_job"`` - + - ``state="JOB_STATE_SUCCEEDED" AND display_name:"my_job_*"`` + - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - - - ``state="JOB_STATE_FAILED"`` + - ``create_time>"2021-05-18T00:00:00Z"`` page_size (int): The standard list page size. 
page_token (str): @@ -462,19 +462,19 @@ class ListHyperparameterTuningJobsRequest(proto.Message): Supported fields: - - ``display_name`` supports = and !=. - - - ``state`` supports = and !=. + - ``display_name`` supports ``=``, ``!=`` comparisons, and + ``:`` wildcard. + - ``state`` supports ``=``, ``!=`` comparisons. + - ``create_time`` supports ``=``, ``!=``,\ ``<``, + ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must + be in RFC 3339 format. Some examples of using the filter are: - - ``state="JOB_STATE_SUCCEEDED" AND display_name="my_job"`` - - - ``state="JOB_STATE_RUNNING" OR display_name="my_job"`` - + - ``state="JOB_STATE_SUCCEEDED" AND display_name:"my_job_*"`` + - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - - - ``state="JOB_STATE_FAILED"`` + - ``create_time>"2021-05-18T00:00:00Z"`` page_size (int): The standard list page size. page_token (str): @@ -629,21 +629,21 @@ class ListBatchPredictionJobsRequest(proto.Message): Supported fields: - - ``display_name`` supports = and !=. - - - ``state`` supports = and !=. - - - ``model_display_name`` supports = and != + - ``display_name`` supports ``=``, ``!=`` comparisons, and + ``:`` wildcard. + - ``model_display_name`` supports ``=``, ``!=`` + comparisons. + - ``state`` supports ``=``, ``!=`` comparisons. + - ``create_time`` supports ``=``, ``!=``,\ ``<``, + ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must + be in RFC 3339 format. Some examples of using the filter are: - - ``state="JOB_STATE_SUCCEEDED" AND display_name="my_job"`` - - - ``state="JOB_STATE_RUNNING" OR display_name="my_job"`` - + - ``state="JOB_STATE_SUCCEEDED" AND display_name:"my_job_*"`` + - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - - - ``state="JOB_STATE_FAILED"`` + - ``create_time>"2021-05-18T00:00:00Z"`` page_size (int): The standard list page size. page_token (str): @@ -924,6 +924,22 @@ class ListModelDeploymentMonitoringJobsRequest(proto.Message): Format: ``projects/{project}/locations/{location}`` filter (str): The standard list filter. + + Supported fields: + + - ``display_name`` supports ``=``, ``!=`` comparisons, and + ``:`` wildcard. + - ``state`` supports ``=``, ``!=`` comparisons. + - ``create_time`` supports ``=``, ``!=``,\ ``<``, + ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must + be in RFC 3339 format. + + Some examples of using the filter are: + + - ``state="JOB_STATE_SUCCEEDED" AND display_name:"my_job_*"`` + - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` + - ``NOT display_name="my_job"`` + - ``create_time>"2021-05-18T00:00:00Z"`` page_size (int): The standard list page size. page_token (str): diff --git a/google/cloud/aiplatform_v1beta1/types/model.py b/google/cloud/aiplatform_v1beta1/types/model.py index d315d2952e..28ab5cfda4 100644 --- a/google/cloud/aiplatform_v1beta1/types/model.py +++ b/google/cloud/aiplatform_v1beta1/types/model.py @@ -281,6 +281,7 @@ class DeploymentResourcesType(proto.Enum): DEPLOYMENT_RESOURCES_TYPE_UNSPECIFIED = 0 DEDICATED_RESOURCES = 1 AUTOMATIC_RESOURCES = 2 + SHARED_RESOURCES = 3 class ExportFormat(proto.Message): r"""Represents export format supported by the Model. 
diff --git a/google/cloud/aiplatform_v1beta1/types/pipeline_service.py b/google/cloud/aiplatform_v1beta1/types/pipeline_service.py index 3a084f10b6..9bddf47e5a 100644 --- a/google/cloud/aiplatform_v1beta1/types/pipeline_service.py +++ b/google/cloud/aiplatform_v1beta1/types/pipeline_service.py @@ -91,21 +91,26 @@ class ListTrainingPipelinesRequest(proto.Message): TrainingPipelines from. Format: ``projects/{project}/locations/{location}`` filter (str): - The standard list filter. Supported fields: + The standard list filter. - - ``display_name`` supports = and !=. + Supported fields: - - ``state`` supports = and !=. + - ``display_name`` supports ``=``, ``!=`` comparisons, and + ``:`` wildcard. + - ``state`` supports ``=``, ``!=`` comparisons. + - ``training_task_definition`` ``=``, ``!=`` comparisons, + and ``:`` wildcard. + - ``create_time`` supports ``=``, ``!=``,\ ``<``, + ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must + be in RFC 3339 format. Some examples of using the filter are: - - ``state="PIPELINE_STATE_SUCCEEDED" AND display_name="my_pipeline"`` - - - ``state="PIPELINE_STATE_RUNNING" OR display_name="my_pipeline"`` - + - ``state="PIPELINE_STATE_SUCCEEDED" AND display_name:"my_pipeline_*"`` + - ``state!="PIPELINE_STATE_FAILED" OR display_name="my_pipeline"`` - ``NOT display_name="my_pipeline"`` - - - ``state="PIPELINE_STATE_FAILED"`` + - ``create_time>"2021-05-18T00:00:00Z"`` + - ``training_task_definition:"*automl_text_classification*"`` page_size (int): The standard list page size. page_token (str): diff --git a/google/cloud/aiplatform_v1beta1/types/saved_query.py b/google/cloud/aiplatform_v1beta1/types/saved_query.py index ebcb8e7dd4..abc5ee5856 100644 --- a/google/cloud/aiplatform_v1beta1/types/saved_query.py +++ b/google/cloud/aiplatform_v1beta1/types/saved_query.py @@ -67,9 +67,9 @@ class SavedQuery(proto.Message): Output only. Number of AnnotationSpecs in the context of the SavedQuery. etag (str): - Used to perform a consistent - read-modify-write update. If not set, a blind - "overwrite" update happens. + Used to perform consistent read-modify-write + updates. If not set, a blind "overwrite" update + happens. support_automl_training (bool): Output only. If the Annotations belonging to the SavedQuery can be used for AutoML training. diff --git a/google/cloud/aiplatform_v1beta1/types/study.py b/google/cloud/aiplatform_v1beta1/types/study.py index 8e122d12a4..81b87dd94b 100644 --- a/google/cloud/aiplatform_v1beta1/types/study.py +++ b/google/cloud/aiplatform_v1beta1/types/study.py @@ -291,8 +291,8 @@ class StudySpec(proto.Message): observation_noise (google.cloud.aiplatform_v1beta1.types.StudySpec.ObservationNoise): The observation noise level of the study. Currently only supported by the Vertex AI Vizier - service. Not supported by HyperparamterTuningJob - or TrainingPipeline. + service. Not supported by + HyperparameterTuningJob or TrainingPipeline. measurement_selection_type (google.cloud.aiplatform_v1beta1.types.StudySpec.MeasurementSelectionType): Describe which measurement selection type will be used @@ -425,7 +425,8 @@ class DoubleValueSpec(proto.Message): that there is no offered starting point. Currently only supported by the Vertex AI Vizier service. - Not supported by HyperparamterTuningJob or TrainingPipeline. + Not supported by HyperparameterTuningJob or + TrainingPipeline. This field is a member of `oneof`_ ``_default_value``. """ @@ -460,7 +461,8 @@ class IntegerValueSpec(proto.Message): that there is no offered starting point. 
Currently only supported by the Vertex AI Vizier service. - Not supported by HyperparamterTuningJob or TrainingPipeline. + Not supported by HyperparameterTuningJob or + TrainingPipeline. This field is a member of `oneof`_ ``_default_value``. """ @@ -490,8 +492,9 @@ class CategoricalValueSpec(proto.Message): assumed to be a relatively good starting point. Unset value signals that there is no offered starting point. - Currently only supported by the Vizier service. Not - supported by HyperparamterTuningJob or TrainingPipeline. + Currently only supported by the Vertex AI Vizier service. + Not supported by HyperparameterTuningJob or + TrainingPipeline. This field is a member of `oneof`_ ``_default_value``. """ @@ -523,8 +526,9 @@ class DiscreteValueSpec(proto.Message): that there is no offered starting point. It automatically rounds to the nearest feasible discrete point. - Currently only supported by the Vizier service. Not - supported by HyperparamterTuningJob or TrainingPipeline. + Currently only supported by the Vertex AI Vizier service. + Not supported by HyperparameterTuningJob or + TrainingPipeline. This field is a member of `oneof`_ ``_default_value``. """ diff --git a/google/cloud/aiplatform_v1beta1/types/types.py b/google/cloud/aiplatform_v1beta1/types/types.py index 1ca3bb86ef..d75de189a8 100644 --- a/google/cloud/aiplatform_v1beta1/types/types.py +++ b/google/cloud/aiplatform_v1beta1/types/types.py @@ -46,7 +46,7 @@ class DoubleArray(proto.Message): Attributes: values (Sequence[float]): - A list of bool values. + A list of double values. """ values = proto.RepeatedField( diff --git a/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_create_deployment_resource_pool_async.py b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_create_deployment_resource_pool_async.py new file mode 100644 index 0000000000..5ead7a9e1e --- /dev/null +++ b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_create_deployment_resource_pool_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeploymentResourcePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-aiplatform + + +# [START aiplatform_v1beta1_generated_DeploymentResourcePoolService_CreateDeploymentResourcePool_async] +from google.cloud import aiplatform_v1beta1 + + +async def sample_create_deployment_resource_pool(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient() + + # Initialize request argument(s) + deployment_resource_pool = aiplatform_v1beta1.DeploymentResourcePool() + deployment_resource_pool.dedicated_resources.min_replica_count = 1803 + + request = aiplatform_v1beta1.CreateDeploymentResourcePoolRequest( + parent="parent_value", + deployment_resource_pool=deployment_resource_pool, + deployment_resource_pool_id="deployment_resource_pool_id_value", + ) + + # Make the request + operation = client.create_deployment_resource_pool(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END aiplatform_v1beta1_generated_DeploymentResourcePoolService_CreateDeploymentResourcePool_async] diff --git a/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_create_deployment_resource_pool_sync.py b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_create_deployment_resource_pool_sync.py new file mode 100644 index 0000000000..d88bfdd973 --- /dev/null +++ b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_create_deployment_resource_pool_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeploymentResourcePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-aiplatform + + +# [START aiplatform_v1beta1_generated_DeploymentResourcePoolService_CreateDeploymentResourcePool_sync] +from google.cloud import aiplatform_v1beta1 + + +def sample_create_deployment_resource_pool(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceClient() + + # Initialize request argument(s) + deployment_resource_pool = aiplatform_v1beta1.DeploymentResourcePool() + deployment_resource_pool.dedicated_resources.min_replica_count = 1803 + + request = aiplatform_v1beta1.CreateDeploymentResourcePoolRequest( + parent="parent_value", + deployment_resource_pool=deployment_resource_pool, + deployment_resource_pool_id="deployment_resource_pool_id_value", + ) + + # Make the request + operation = client.create_deployment_resource_pool(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END aiplatform_v1beta1_generated_DeploymentResourcePoolService_CreateDeploymentResourcePool_sync] diff --git a/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_delete_deployment_resource_pool_async.py b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_delete_deployment_resource_pool_async.py new file mode 100644 index 0000000000..4210b506e1 --- /dev/null +++ b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_delete_deployment_resource_pool_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeploymentResourcePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-aiplatform + + +# [START aiplatform_v1beta1_generated_DeploymentResourcePoolService_DeleteDeploymentResourcePool_async] +from google.cloud import aiplatform_v1beta1 + + +async def sample_delete_deployment_resource_pool(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.DeleteDeploymentResourcePoolRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deployment_resource_pool(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END aiplatform_v1beta1_generated_DeploymentResourcePoolService_DeleteDeploymentResourcePool_async] diff --git a/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_delete_deployment_resource_pool_sync.py b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_delete_deployment_resource_pool_sync.py new file mode 100644 index 0000000000..2a71ec0dc9 --- /dev/null +++ b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_delete_deployment_resource_pool_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeploymentResourcePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-aiplatform + + +# [START aiplatform_v1beta1_generated_DeploymentResourcePoolService_DeleteDeploymentResourcePool_sync] +from google.cloud import aiplatform_v1beta1 + + +def sample_delete_deployment_resource_pool(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.DeleteDeploymentResourcePoolRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deployment_resource_pool(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END aiplatform_v1beta1_generated_DeploymentResourcePoolService_DeleteDeploymentResourcePool_sync] diff --git a/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_get_deployment_resource_pool_async.py b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_get_deployment_resource_pool_async.py new file mode 100644 index 0000000000..e95c1060f7 --- /dev/null +++ b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_get_deployment_resource_pool_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeploymentResourcePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-aiplatform + + +# [START aiplatform_v1beta1_generated_DeploymentResourcePoolService_GetDeploymentResourcePool_async] +from google.cloud import aiplatform_v1beta1 + + +async def sample_get_deployment_resource_pool(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.GetDeploymentResourcePoolRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deployment_resource_pool(request=request) + + # Handle the response + print(response) + +# [END aiplatform_v1beta1_generated_DeploymentResourcePoolService_GetDeploymentResourcePool_async] diff --git a/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_get_deployment_resource_pool_sync.py b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_get_deployment_resource_pool_sync.py new file mode 100644 index 0000000000..646c50268b --- /dev/null +++ b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_get_deployment_resource_pool_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeploymentResourcePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-aiplatform + + +# [START aiplatform_v1beta1_generated_DeploymentResourcePoolService_GetDeploymentResourcePool_sync] +from google.cloud import aiplatform_v1beta1 + + +def sample_get_deployment_resource_pool(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.GetDeploymentResourcePoolRequest( + name="name_value", + ) + + # Make the request + response = client.get_deployment_resource_pool(request=request) + + # Handle the response + print(response) + +# [END aiplatform_v1beta1_generated_DeploymentResourcePoolService_GetDeploymentResourcePool_sync] diff --git a/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_list_deployment_resource_pools_async.py b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_list_deployment_resource_pools_async.py new file mode 100644 index 0000000000..f23b215a4d --- /dev/null +++ b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_list_deployment_resource_pools_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeploymentResourcePools +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-aiplatform + + +# [START aiplatform_v1beta1_generated_DeploymentResourcePoolService_ListDeploymentResourcePools_async] +from google.cloud import aiplatform_v1beta1 + + +async def sample_list_deployment_resource_pools(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.ListDeploymentResourcePoolsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployment_resource_pools(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END aiplatform_v1beta1_generated_DeploymentResourcePoolService_ListDeploymentResourcePools_async] diff --git a/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_list_deployment_resource_pools_sync.py b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_list_deployment_resource_pools_sync.py new file mode 100644 index 0000000000..95468cab06 --- /dev/null +++ b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_list_deployment_resource_pools_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeploymentResourcePools +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-aiplatform + + +# [START aiplatform_v1beta1_generated_DeploymentResourcePoolService_ListDeploymentResourcePools_sync] +from google.cloud import aiplatform_v1beta1 + + +def sample_list_deployment_resource_pools(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.ListDeploymentResourcePoolsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployment_resource_pools(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END aiplatform_v1beta1_generated_DeploymentResourcePoolService_ListDeploymentResourcePools_sync] diff --git a/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_query_deployed_models_async.py b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_query_deployed_models_async.py new file mode 100644 index 0000000000..457632c97b --- /dev/null +++ b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_query_deployed_models_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryDeployedModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-aiplatform + + +# [START aiplatform_v1beta1_generated_DeploymentResourcePoolService_QueryDeployedModels_async] +from google.cloud import aiplatform_v1beta1 + + +async def sample_query_deployed_models(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.QueryDeployedModelsRequest( + deployment_resource_pool="deployment_resource_pool_value", + ) + + # Make the request + page_result = client.query_deployed_models(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END aiplatform_v1beta1_generated_DeploymentResourcePoolService_QueryDeployedModels_async] diff --git a/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_query_deployed_models_sync.py b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_query_deployed_models_sync.py new file mode 100644 index 0000000000..6c79372194 --- /dev/null +++ b/samples/generated_samples/aiplatform_v1beta1_generated_deployment_resource_pool_service_query_deployed_models_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryDeployedModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-aiplatform + + +# [START aiplatform_v1beta1_generated_DeploymentResourcePoolService_QueryDeployedModels_sync] +from google.cloud import aiplatform_v1beta1 + + +def sample_query_deployed_models(): + # Create a client + client = aiplatform_v1beta1.DeploymentResourcePoolServiceClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.QueryDeployedModelsRequest( + deployment_resource_pool="deployment_resource_pool_value", + ) + + # Make the request + page_result = client.query_deployed_models(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END aiplatform_v1beta1_generated_DeploymentResourcePoolService_QueryDeployedModels_sync] diff --git a/samples/generated_samples/snippet_metadata_aiplatform_v1beta1.json b/samples/generated_samples/snippet_metadata_aiplatform_v1beta1.json index ffaff71c3d..65b4c2c329 100644 --- a/samples/generated_samples/snippet_metadata_aiplatform_v1beta1.json +++ b/samples/generated_samples/snippet_metadata_aiplatform_v1beta1.json @@ -1813,6 +1813,827 @@ ], "title": "aiplatform_v1beta1_generated_dataset_service_update_dataset_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient", + "shortName": "DeploymentResourcePoolServiceAsyncClient" + }, + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient.create_deployment_resource_pool", + "method": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService.CreateDeploymentResourcePool", + "service": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", + "shortName": "DeploymentResourcePoolService" + }, + "shortName": "CreateDeploymentResourcePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.aiplatform_v1beta1.types.CreateDeploymentResourcePoolRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deployment_resource_pool", + "type": "google.cloud.aiplatform_v1beta1.types.DeploymentResourcePool" + }, + { + "name": "deployment_resource_pool_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_deployment_resource_pool" + }, + "description": "Sample for CreateDeploymentResourcePool", + "file": "aiplatform_v1beta1_generated_deployment_resource_pool_service_create_deployment_resource_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "aiplatform_v1beta1_generated_DeploymentResourcePoolService_CreateDeploymentResourcePool_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "aiplatform_v1beta1_generated_deployment_resource_pool_service_create_deployment_resource_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceClient", + "shortName": "DeploymentResourcePoolServiceClient" + }, + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceClient.create_deployment_resource_pool", + "method": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService.CreateDeploymentResourcePool", + "service": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", + "shortName": "DeploymentResourcePoolService" + }, + "shortName": "CreateDeploymentResourcePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.aiplatform_v1beta1.types.CreateDeploymentResourcePoolRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deployment_resource_pool", + "type": "google.cloud.aiplatform_v1beta1.types.DeploymentResourcePool" + }, + { + "name": "deployment_resource_pool_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_deployment_resource_pool" + }, + "description": "Sample for CreateDeploymentResourcePool", + "file": "aiplatform_v1beta1_generated_deployment_resource_pool_service_create_deployment_resource_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "aiplatform_v1beta1_generated_DeploymentResourcePoolService_CreateDeploymentResourcePool_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "aiplatform_v1beta1_generated_deployment_resource_pool_service_create_deployment_resource_pool_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient", + "shortName": "DeploymentResourcePoolServiceAsyncClient" + }, + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient.delete_deployment_resource_pool", + "method": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService.DeleteDeploymentResourcePool", + "service": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", + "shortName": "DeploymentResourcePoolService" + }, + "shortName": "DeleteDeploymentResourcePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.aiplatform_v1beta1.types.DeleteDeploymentResourcePoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_deployment_resource_pool" + }, + "description": "Sample for DeleteDeploymentResourcePool", + "file": "aiplatform_v1beta1_generated_deployment_resource_pool_service_delete_deployment_resource_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"aiplatform_v1beta1_generated_DeploymentResourcePoolService_DeleteDeploymentResourcePool_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ], + "title": "aiplatform_v1beta1_generated_deployment_resource_pool_service_delete_deployment_resource_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceClient", + "shortName": "DeploymentResourcePoolServiceClient" + }, + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceClient.delete_deployment_resource_pool", + "method": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService.DeleteDeploymentResourcePool", + "service": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", + "shortName": "DeploymentResourcePoolService" + }, + "shortName": "DeleteDeploymentResourcePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.aiplatform_v1beta1.types.DeleteDeploymentResourcePoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_deployment_resource_pool" + }, + "description": "Sample for DeleteDeploymentResourcePool", + "file": "aiplatform_v1beta1_generated_deployment_resource_pool_service_delete_deployment_resource_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "aiplatform_v1beta1_generated_DeploymentResourcePoolService_DeleteDeploymentResourcePool_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ], + "title": "aiplatform_v1beta1_generated_deployment_resource_pool_service_delete_deployment_resource_pool_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient", + "shortName": "DeploymentResourcePoolServiceAsyncClient" + }, + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient.get_deployment_resource_pool", + "method": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService.GetDeploymentResourcePool", + "service": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", + "shortName": "DeploymentResourcePoolService" + }, + "shortName": "GetDeploymentResourcePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.aiplatform_v1beta1.types.GetDeploymentResourcePoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + 
{ + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.aiplatform_v1beta1.types.DeploymentResourcePool", + "shortName": "get_deployment_resource_pool" + }, + "description": "Sample for GetDeploymentResourcePool", + "file": "aiplatform_v1beta1_generated_deployment_resource_pool_service_get_deployment_resource_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "aiplatform_v1beta1_generated_DeploymentResourcePoolService_GetDeploymentResourcePool_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "aiplatform_v1beta1_generated_deployment_resource_pool_service_get_deployment_resource_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceClient", + "shortName": "DeploymentResourcePoolServiceClient" + }, + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceClient.get_deployment_resource_pool", + "method": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService.GetDeploymentResourcePool", + "service": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", + "shortName": "DeploymentResourcePoolService" + }, + "shortName": "GetDeploymentResourcePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.aiplatform_v1beta1.types.GetDeploymentResourcePoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.aiplatform_v1beta1.types.DeploymentResourcePool", + "shortName": "get_deployment_resource_pool" + }, + "description": "Sample for GetDeploymentResourcePool", + "file": "aiplatform_v1beta1_generated_deployment_resource_pool_service_get_deployment_resource_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "aiplatform_v1beta1_generated_DeploymentResourcePoolService_GetDeploymentResourcePool_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "aiplatform_v1beta1_generated_deployment_resource_pool_service_get_deployment_resource_pool_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient", + "shortName": "DeploymentResourcePoolServiceAsyncClient" + }, + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient.list_deployment_resource_pools", + "method": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService.ListDeploymentResourcePools", + "service": { + "fullName": 
"google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", + "shortName": "DeploymentResourcePoolService" + }, + "shortName": "ListDeploymentResourcePools" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.aiplatform_v1beta1.types.ListDeploymentResourcePoolsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service.pagers.ListDeploymentResourcePoolsAsyncPager", + "shortName": "list_deployment_resource_pools" + }, + "description": "Sample for ListDeploymentResourcePools", + "file": "aiplatform_v1beta1_generated_deployment_resource_pool_service_list_deployment_resource_pools_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "aiplatform_v1beta1_generated_DeploymentResourcePoolService_ListDeploymentResourcePools_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "aiplatform_v1beta1_generated_deployment_resource_pool_service_list_deployment_resource_pools_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceClient", + "shortName": "DeploymentResourcePoolServiceClient" + }, + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceClient.list_deployment_resource_pools", + "method": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService.ListDeploymentResourcePools", + "service": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", + "shortName": "DeploymentResourcePoolService" + }, + "shortName": "ListDeploymentResourcePools" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.aiplatform_v1beta1.types.ListDeploymentResourcePoolsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service.pagers.ListDeploymentResourcePoolsPager", + "shortName": "list_deployment_resource_pools" + }, + "description": "Sample for ListDeploymentResourcePools", + "file": "aiplatform_v1beta1_generated_deployment_resource_pool_service_list_deployment_resource_pools_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "aiplatform_v1beta1_generated_DeploymentResourcePoolService_ListDeploymentResourcePools_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"aiplatform_v1beta1_generated_deployment_resource_pool_service_list_deployment_resource_pools_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient", + "shortName": "DeploymentResourcePoolServiceAsyncClient" + }, + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient.query_deployed_models", + "method": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService.QueryDeployedModels", + "service": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", + "shortName": "DeploymentResourcePoolService" + }, + "shortName": "QueryDeployedModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.aiplatform_v1beta1.types.QueryDeployedModelsRequest" + }, + { + "name": "deployment_resource_pool", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service.pagers.QueryDeployedModelsAsyncPager", + "shortName": "query_deployed_models" + }, + "description": "Sample for QueryDeployedModels", + "file": "aiplatform_v1beta1_generated_deployment_resource_pool_service_query_deployed_models_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "aiplatform_v1beta1_generated_DeploymentResourcePoolService_QueryDeployedModels_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "aiplatform_v1beta1_generated_deployment_resource_pool_service_query_deployed_models_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceClient", + "shortName": "DeploymentResourcePoolServiceClient" + }, + "fullName": "google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceClient.query_deployed_models", + "method": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService.QueryDeployedModels", + "service": { + "fullName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", + "shortName": "DeploymentResourcePoolService" + }, + "shortName": "QueryDeployedModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.aiplatform_v1beta1.types.QueryDeployedModelsRequest" + }, + { + "name": "deployment_resource_pool", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service.pagers.QueryDeployedModelsPager", + "shortName": "query_deployed_models" + }, + "description": "Sample for QueryDeployedModels", + "file": "aiplatform_v1beta1_generated_deployment_resource_pool_service_query_deployed_models_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"aiplatform_v1beta1_generated_DeploymentResourcePoolService_QueryDeployedModels_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "aiplatform_v1beta1_generated_deployment_resource_pool_service_query_deployed_models_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_deployment_resource_pool_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_deployment_resource_pool_service.py new file mode 100644 index 0000000000..3157157632 --- /dev/null +++ b/tests/unit/gapic/aiplatform_v1beta1/test_deployment_resource_pool_service.py @@ -0,0 +1,4801 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service import ( + DeploymentResourcePoolServiceAsyncClient, +) +from google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service import ( + DeploymentResourcePoolServiceClient, +) +from google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service import ( + transports, +) +from google.cloud.aiplatform_v1beta1.types import accelerator_type +from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool +from google.cloud.aiplatform_v1beta1.types import ( + deployment_resource_pool as gca_deployment_resource_pool, +) +from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.aiplatform_v1beta1.types import endpoint +from google.cloud.aiplatform_v1beta1.types import machine_resources +from google.cloud.aiplatform_v1beta1.types import operation as gca_operation +from google.cloud.location import locations_pb2 
+from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DeploymentResourcePoolServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DeploymentResourcePoolServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DeploymentResourcePoolServiceClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + DeploymentResourcePoolServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DeploymentResourcePoolServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + DeploymentResourcePoolServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DeploymentResourcePoolServiceClient, "grpc"), + (DeploymentResourcePoolServiceAsyncClient, "grpc_asyncio"), + ], +) +def test_deployment_resource_pool_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("aiplatform.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DeploymentResourcePoolServiceGrpcTransport, "grpc"), + (transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_deployment_resource_pool_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DeploymentResourcePoolServiceClient, 
"grpc"), + (DeploymentResourcePoolServiceAsyncClient, "grpc_asyncio"), + ], +) +def test_deployment_resource_pool_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("aiplatform.googleapis.com:443") + + +def test_deployment_resource_pool_service_client_get_transport_class(): + transport = DeploymentResourcePoolServiceClient.get_transport_class() + available_transports = [ + transports.DeploymentResourcePoolServiceGrpcTransport, + ] + assert transport in available_transports + + transport = DeploymentResourcePoolServiceClient.get_transport_class("grpc") + assert transport == transports.DeploymentResourcePoolServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + DeploymentResourcePoolServiceClient, + transports.DeploymentResourcePoolServiceGrpcTransport, + "grpc", + ), + ( + DeploymentResourcePoolServiceAsyncClient, + transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + DeploymentResourcePoolServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DeploymentResourcePoolServiceClient), +) +@mock.patch.object( + DeploymentResourcePoolServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DeploymentResourcePoolServiceAsyncClient), +) +def test_deployment_resource_pool_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + DeploymentResourcePoolServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + DeploymentResourcePoolServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + DeploymentResourcePoolServiceClient, + transports.DeploymentResourcePoolServiceGrpcTransport, + "grpc", + "true", + ), + ( + DeploymentResourcePoolServiceAsyncClient, + transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + DeploymentResourcePoolServiceClient, + transports.DeploymentResourcePoolServiceGrpcTransport, + "grpc", + "false", + ), + ( + DeploymentResourcePoolServiceAsyncClient, + transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + 
), + ], +) +@mock.patch.object( + DeploymentResourcePoolServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DeploymentResourcePoolServiceClient), +) +@mock.patch.object( + DeploymentResourcePoolServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DeploymentResourcePoolServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_deployment_resource_pool_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [DeploymentResourcePoolServiceClient, DeploymentResourcePoolServiceAsyncClient], +) +@mock.patch.object( + DeploymentResourcePoolServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DeploymentResourcePoolServiceClient), +) +@mock.patch.object( + DeploymentResourcePoolServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DeploymentResourcePoolServiceAsyncClient), +) +def test_deployment_resource_pool_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + DeploymentResourcePoolServiceClient, + transports.DeploymentResourcePoolServiceGrpcTransport, + "grpc", + ), + ( + DeploymentResourcePoolServiceAsyncClient, + transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_deployment_resource_pool_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DeploymentResourcePoolServiceClient, + transports.DeploymentResourcePoolServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DeploymentResourcePoolServiceAsyncClient, + transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_deployment_resource_pool_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_deployment_resource_pool_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service.transports.DeploymentResourcePoolServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DeploymentResourcePoolServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DeploymentResourcePoolServiceClient, + transports.DeploymentResourcePoolServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DeploymentResourcePoolServiceAsyncClient, + transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_deployment_resource_pool_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "aiplatform.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="aiplatform.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + deployment_resource_pool_service.CreateDeploymentResourcePoolRequest, + dict, + ], +) +def test_create_deployment_resource_pool(request_type, transport: str = "grpc"): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment_resource_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_deployment_resource_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == deployment_resource_pool_service.CreateDeploymentResourcePoolRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_deployment_resource_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment_resource_pool), "__call__" + ) as call: + client.create_deployment_resource_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == deployment_resource_pool_service.CreateDeploymentResourcePoolRequest() + ) + + +@pytest.mark.asyncio +async def test_create_deployment_resource_pool_async( + transport: str = "grpc_asyncio", + request_type=deployment_resource_pool_service.CreateDeploymentResourcePoolRequest, +): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_deployment_resource_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_deployment_resource_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == deployment_resource_pool_service.CreateDeploymentResourcePoolRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_deployment_resource_pool_async_from_dict(): + await test_create_deployment_resource_pool_async(request_type=dict) + + +def test_create_deployment_resource_pool_field_headers(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = deployment_resource_pool_service.CreateDeploymentResourcePoolRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment_resource_pool), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_deployment_resource_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_deployment_resource_pool_field_headers_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = deployment_resource_pool_service.CreateDeploymentResourcePoolRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment_resource_pool), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_deployment_resource_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_deployment_resource_pool_flattened(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment_resource_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_deployment_resource_pool( + parent="parent_value", + deployment_resource_pool=gca_deployment_resource_pool.DeploymentResourcePool( + name="name_value" + ), + deployment_resource_pool_id="deployment_resource_pool_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deployment_resource_pool + mock_val = gca_deployment_resource_pool.DeploymentResourcePool( + name="name_value" + ) + assert arg == mock_val + arg = args[0].deployment_resource_pool_id + mock_val = "deployment_resource_pool_id_value" + assert arg == mock_val + + +def test_create_deployment_resource_pool_flattened_error(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_deployment_resource_pool( + deployment_resource_pool_service.CreateDeploymentResourcePoolRequest(), + parent="parent_value", + deployment_resource_pool=gca_deployment_resource_pool.DeploymentResourcePool( + name="name_value" + ), + deployment_resource_pool_id="deployment_resource_pool_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_deployment_resource_pool_flattened_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment_resource_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_deployment_resource_pool( + parent="parent_value", + deployment_resource_pool=gca_deployment_resource_pool.DeploymentResourcePool( + name="name_value" + ), + deployment_resource_pool_id="deployment_resource_pool_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deployment_resource_pool + mock_val = gca_deployment_resource_pool.DeploymentResourcePool( + name="name_value" + ) + assert arg == mock_val + arg = args[0].deployment_resource_pool_id + mock_val = "deployment_resource_pool_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_deployment_resource_pool_flattened_error_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_deployment_resource_pool( + deployment_resource_pool_service.CreateDeploymentResourcePoolRequest(), + parent="parent_value", + deployment_resource_pool=gca_deployment_resource_pool.DeploymentResourcePool( + name="name_value" + ), + deployment_resource_pool_id="deployment_resource_pool_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + deployment_resource_pool_service.GetDeploymentResourcePoolRequest, + dict, + ], +) +def test_get_deployment_resource_pool(request_type, transport: str = "grpc"): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_resource_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = deployment_resource_pool.DeploymentResourcePool( + name="name_value", + ) + response = client.get_deployment_resource_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == deployment_resource_pool_service.GetDeploymentResourcePoolRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, deployment_resource_pool.DeploymentResourcePool) + assert response.name == "name_value" + + +def test_get_deployment_resource_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_resource_pool), "__call__" + ) as call: + client.get_deployment_resource_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == deployment_resource_pool_service.GetDeploymentResourcePoolRequest() + ) + + +@pytest.mark.asyncio +async def test_get_deployment_resource_pool_async( + transport: str = "grpc_asyncio", + request_type=deployment_resource_pool_service.GetDeploymentResourcePoolRequest, +): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_resource_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + deployment_resource_pool.DeploymentResourcePool( + name="name_value", + ) + ) + response = await client.get_deployment_resource_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == deployment_resource_pool_service.GetDeploymentResourcePoolRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, deployment_resource_pool.DeploymentResourcePool) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_deployment_resource_pool_async_from_dict(): + await test_get_deployment_resource_pool_async(request_type=dict) + + +def test_get_deployment_resource_pool_field_headers(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = deployment_resource_pool_service.GetDeploymentResourcePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_resource_pool), "__call__" + ) as call: + call.return_value = deployment_resource_pool.DeploymentResourcePool() + client.get_deployment_resource_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_deployment_resource_pool_field_headers_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = deployment_resource_pool_service.GetDeploymentResourcePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_resource_pool), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + deployment_resource_pool.DeploymentResourcePool() + ) + await client.get_deployment_resource_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_deployment_resource_pool_flattened(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_resource_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = deployment_resource_pool.DeploymentResourcePool() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_deployment_resource_pool( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_deployment_resource_pool_flattened_error(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_deployment_resource_pool( + deployment_resource_pool_service.GetDeploymentResourcePoolRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_deployment_resource_pool_flattened_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_resource_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = deployment_resource_pool.DeploymentResourcePool() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + deployment_resource_pool.DeploymentResourcePool() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_deployment_resource_pool( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_deployment_resource_pool_flattened_error_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_deployment_resource_pool( + deployment_resource_pool_service.GetDeploymentResourcePoolRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + deployment_resource_pool_service.ListDeploymentResourcePoolsRequest, + dict, + ], +) +def test_list_deployment_resource_pools(request_type, transport: str = "grpc"): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_resource_pools), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + next_page_token="next_page_token_value", + ) + ) + response = client.list_deployment_resource_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == deployment_resource_pool_service.ListDeploymentResourcePoolsRequest() + ) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDeploymentResourcePoolsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_deployment_resource_pools_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_resource_pools), "__call__" + ) as call: + client.list_deployment_resource_pools() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == deployment_resource_pool_service.ListDeploymentResourcePoolsRequest() + ) + + +@pytest.mark.asyncio +async def test_list_deployment_resource_pools_async( + transport: str = "grpc_asyncio", + request_type=deployment_resource_pool_service.ListDeploymentResourcePoolsRequest, +): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_resource_pools), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_deployment_resource_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == deployment_resource_pool_service.ListDeploymentResourcePoolsRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeploymentResourcePoolsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_deployment_resource_pools_async_from_dict(): + await test_list_deployment_resource_pools_async(request_type=dict) + + +def test_list_deployment_resource_pools_field_headers(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = deployment_resource_pool_service.ListDeploymentResourcePoolsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_resource_pools), "__call__" + ) as call: + call.return_value = ( + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse() + ) + client.list_deployment_resource_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_deployment_resource_pools_field_headers_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = deployment_resource_pool_service.ListDeploymentResourcePoolsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_resource_pools), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse() + ) + await client.list_deployment_resource_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_deployment_resource_pools_flattened(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_resource_pools), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_deployment_resource_pools( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_deployment_resource_pools_flattened_error(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_deployment_resource_pools( + deployment_resource_pool_service.ListDeploymentResourcePoolsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_deployment_resource_pools_flattened_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_resource_pools), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse() + ) + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_deployment_resource_pools( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_deployment_resource_pools_flattened_error_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_deployment_resource_pools( + deployment_resource_pool_service.ListDeploymentResourcePoolsRequest(), + parent="parent_value", + ) + + +def test_list_deployment_resource_pools_pager(transport_name: str = "grpc"): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_resource_pools), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[ + deployment_resource_pool.DeploymentResourcePool(), + deployment_resource_pool.DeploymentResourcePool(), + deployment_resource_pool.DeploymentResourcePool(), + ], + next_page_token="abc", + ), + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[], + next_page_token="def", + ), + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[ + deployment_resource_pool.DeploymentResourcePool(), + ], + next_page_token="ghi", + ), + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[ + deployment_resource_pool.DeploymentResourcePool(), + deployment_resource_pool.DeploymentResourcePool(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_deployment_resource_pools(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, deployment_resource_pool.DeploymentResourcePool) + for i in results + ) + + +def test_list_deployment_resource_pools_pages(transport_name: str = "grpc"): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_resource_pools), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[ + deployment_resource_pool.DeploymentResourcePool(), + deployment_resource_pool.DeploymentResourcePool(), + deployment_resource_pool.DeploymentResourcePool(), + ], + next_page_token="abc", + ), + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[], + next_page_token="def", + ), + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[ + deployment_resource_pool.DeploymentResourcePool(), + ], + next_page_token="ghi", + ), + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[ + deployment_resource_pool.DeploymentResourcePool(), + deployment_resource_pool.DeploymentResourcePool(), + ], + ), + RuntimeError, + ) + pages = list(client.list_deployment_resource_pools(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_deployment_resource_pools_async_pager(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_resource_pools), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[ + deployment_resource_pool.DeploymentResourcePool(), + deployment_resource_pool.DeploymentResourcePool(), + deployment_resource_pool.DeploymentResourcePool(), + ], + next_page_token="abc", + ), + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[], + next_page_token="def", + ), + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[ + deployment_resource_pool.DeploymentResourcePool(), + ], + next_page_token="ghi", + ), + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[ + deployment_resource_pool.DeploymentResourcePool(), + deployment_resource_pool.DeploymentResourcePool(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_deployment_resource_pools( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, deployment_resource_pool.DeploymentResourcePool) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_deployment_resource_pools_async_pages(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_resource_pools), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[ + deployment_resource_pool.DeploymentResourcePool(), + deployment_resource_pool.DeploymentResourcePool(), + deployment_resource_pool.DeploymentResourcePool(), + ], + next_page_token="abc", + ), + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[], + next_page_token="def", + ), + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[ + deployment_resource_pool.DeploymentResourcePool(), + ], + next_page_token="ghi", + ), + deployment_resource_pool_service.ListDeploymentResourcePoolsResponse( + deployment_resource_pools=[ + deployment_resource_pool.DeploymentResourcePool(), + deployment_resource_pool.DeploymentResourcePool(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_deployment_resource_pools(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest, + dict, + ], +) +def test_delete_deployment_resource_pool(request_type, transport: str = "grpc"): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment_resource_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_deployment_resource_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_deployment_resource_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_deployment_resource_pool), "__call__" + ) as call: + client.delete_deployment_resource_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest() + ) + + +@pytest.mark.asyncio +async def test_delete_deployment_resource_pool_async( + transport: str = "grpc_asyncio", + request_type=deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest, +): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment_resource_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_deployment_resource_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_deployment_resource_pool_async_from_dict(): + await test_delete_deployment_resource_pool_async(request_type=dict) + + +def test_delete_deployment_resource_pool_field_headers(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment_resource_pool), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_deployment_resource_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_deployment_resource_pool_field_headers_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_deployment_resource_pool), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_deployment_resource_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_deployment_resource_pool_flattened(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment_resource_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_deployment_resource_pool( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_deployment_resource_pool_flattened_error(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deployment_resource_pool( + deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_deployment_resource_pool_flattened_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment_resource_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_deployment_resource_pool( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_deployment_resource_pool_flattened_error_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_deployment_resource_pool( + deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + deployment_resource_pool_service.QueryDeployedModelsRequest, + dict, + ], +) +def test_query_deployed_models(request_type, transport: str = "grpc"): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_deployed_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + deployment_resource_pool_service.QueryDeployedModelsResponse( + next_page_token="next_page_token_value", + ) + ) + response = client.query_deployed_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == deployment_resource_pool_service.QueryDeployedModelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.QueryDeployedModelsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_query_deployed_models_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_deployed_models), "__call__" + ) as call: + client.query_deployed_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == deployment_resource_pool_service.QueryDeployedModelsRequest() + + +@pytest.mark.asyncio +async def test_query_deployed_models_async( + transport: str = "grpc_asyncio", + request_type=deployment_resource_pool_service.QueryDeployedModelsRequest, +): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_deployed_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + deployment_resource_pool_service.QueryDeployedModelsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.query_deployed_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == deployment_resource_pool_service.QueryDeployedModelsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.QueryDeployedModelsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_query_deployed_models_async_from_dict(): + await test_query_deployed_models_async(request_type=dict) + + +def test_query_deployed_models_field_headers(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = deployment_resource_pool_service.QueryDeployedModelsRequest() + + request.deployment_resource_pool = "deployment_resource_pool_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_deployed_models), "__call__" + ) as call: + call.return_value = ( + deployment_resource_pool_service.QueryDeployedModelsResponse() + ) + client.query_deployed_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "deployment_resource_pool=deployment_resource_pool_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_query_deployed_models_field_headers_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = deployment_resource_pool_service.QueryDeployedModelsRequest() + + request.deployment_resource_pool = "deployment_resource_pool_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_deployed_models), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + deployment_resource_pool_service.QueryDeployedModelsResponse() + ) + await client.query_deployed_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "deployment_resource_pool=deployment_resource_pool_value", + ) in kw["metadata"] + + +def test_query_deployed_models_flattened(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_deployed_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + deployment_resource_pool_service.QueryDeployedModelsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.query_deployed_models( + deployment_resource_pool="deployment_resource_pool_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].deployment_resource_pool + mock_val = "deployment_resource_pool_value" + assert arg == mock_val + + +def test_query_deployed_models_flattened_error(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.query_deployed_models( + deployment_resource_pool_service.QueryDeployedModelsRequest(), + deployment_resource_pool="deployment_resource_pool_value", + ) + + +@pytest.mark.asyncio +async def test_query_deployed_models_flattened_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_deployed_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + deployment_resource_pool_service.QueryDeployedModelsResponse() + ) + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + deployment_resource_pool_service.QueryDeployedModelsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.query_deployed_models( + deployment_resource_pool="deployment_resource_pool_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].deployment_resource_pool + mock_val = "deployment_resource_pool_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_query_deployed_models_flattened_error_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.query_deployed_models( + deployment_resource_pool_service.QueryDeployedModelsRequest(), + deployment_resource_pool="deployment_resource_pool_value", + ) + + +def test_query_deployed_models_pager(transport_name: str = "grpc"): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_deployed_models), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[ + endpoint.DeployedModel(), + endpoint.DeployedModel(), + endpoint.DeployedModel(), + ], + next_page_token="abc", + ), + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[], + next_page_token="def", + ), + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[ + endpoint.DeployedModel(), + ], + next_page_token="ghi", + ), + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[ + endpoint.DeployedModel(), + endpoint.DeployedModel(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deployment_resource_pool", ""),) + ), + ) + pager = client.query_deployed_models(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, endpoint.DeployedModel) for i in results) + + +def test_query_deployed_models_pages(transport_name: str = "grpc"): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_deployed_models), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[ + endpoint.DeployedModel(), + endpoint.DeployedModel(), + endpoint.DeployedModel(), + ], + next_page_token="abc", + ), + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[], + next_page_token="def", + ), + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[ + endpoint.DeployedModel(), + ], + next_page_token="ghi", + ), + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[ + endpoint.DeployedModel(), + endpoint.DeployedModel(), + ], + ), + RuntimeError, + ) + pages = list(client.query_deployed_models(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_query_deployed_models_async_pager(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_deployed_models), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[ + endpoint.DeployedModel(), + endpoint.DeployedModel(), + endpoint.DeployedModel(), + ], + next_page_token="abc", + ), + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[], + next_page_token="def", + ), + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[ + endpoint.DeployedModel(), + ], + next_page_token="ghi", + ), + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[ + endpoint.DeployedModel(), + endpoint.DeployedModel(), + ], + ), + RuntimeError, + ) + async_pager = await client.query_deployed_models( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, endpoint.DeployedModel) for i in responses) + + +@pytest.mark.asyncio +async def test_query_deployed_models_async_pages(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_deployed_models), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[ + endpoint.DeployedModel(), + endpoint.DeployedModel(), + endpoint.DeployedModel(), + ], + next_page_token="abc", + ), + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[], + next_page_token="def", + ), + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[ + endpoint.DeployedModel(), + ], + next_page_token="ghi", + ), + deployment_resource_pool_service.QueryDeployedModelsResponse( + deployed_models=[ + endpoint.DeployedModel(), + endpoint.DeployedModel(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.query_deployed_models(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DeploymentResourcePoolServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DeploymentResourcePoolServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DeploymentResourcePoolServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.DeploymentResourcePoolServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DeploymentResourcePoolServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DeploymentResourcePoolServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DeploymentResourcePoolServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DeploymentResourcePoolServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DeploymentResourcePoolServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DeploymentResourcePoolServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DeploymentResourcePoolServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DeploymentResourcePoolServiceGrpcTransport, + transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = DeploymentResourcePoolServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DeploymentResourcePoolServiceGrpcTransport, + ) + + +def test_deployment_resource_pool_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DeploymentResourcePoolServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_deployment_resource_pool_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service.transports.DeploymentResourcePoolServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DeploymentResourcePoolServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_deployment_resource_pool", + "get_deployment_resource_pool", + "list_deployment_resource_pools", + "delete_deployment_resource_pool", + "query_deployed_models", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_location", + "list_locations", + "get_operation", + "wait_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_deployment_resource_pool_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service.transports.DeploymentResourcePoolServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DeploymentResourcePoolServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_deployment_resource_pool_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.aiplatform_v1beta1.services.deployment_resource_pool_service.transports.DeploymentResourcePoolServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DeploymentResourcePoolServiceTransport() + adc.assert_called_once() + + +def test_deployment_resource_pool_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DeploymentResourcePoolServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DeploymentResourcePoolServiceGrpcTransport, + transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport, + ], +) +def test_deployment_resource_pool_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DeploymentResourcePoolServiceGrpcTransport, + transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport, + ], +) +def test_deployment_resource_pool_service_transport_auth_gdch_credentials( + transport_class, +): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DeploymentResourcePoolServiceGrpcTransport, grpc_helpers), + ( + transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +def test_deployment_resource_pool_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "aiplatform.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="aiplatform.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DeploymentResourcePoolServiceGrpcTransport, + transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport, + ], +) +def test_deployment_resource_pool_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds,
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+    ],
+)
+def test_deployment_resource_pool_service_host_no_port(transport_name):
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="aiplatform.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == ("aiplatform.googleapis.com:443")
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+    ],
+)
+def test_deployment_resource_pool_service_host_with_port(transport_name):
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="aiplatform.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == ("aiplatform.googleapis.com:8000")
+
+
+def test_deployment_resource_pool_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DeploymentResourcePoolServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_deployment_resource_pool_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DeploymentResourcePoolServiceGrpcTransport,
+        transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_deployment_resource_pool_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DeploymentResourcePoolServiceGrpcTransport,
+        transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_deployment_resource_pool_service_transport_channel_mtls_with_adc(
+    transport_class,
+):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_deployment_resource_pool_service_grpc_lro_client():
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client + + +def test_deployment_resource_pool_service_grpc_lro_async_client(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_deployment_resource_pool_path(): + project = "squid" + location = "clam" + deployment_resource_pool = "whelk" + expected = "projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool}".format( + project=project, + location=location, + deployment_resource_pool=deployment_resource_pool, + ) + actual = DeploymentResourcePoolServiceClient.deployment_resource_pool_path( + project, location, deployment_resource_pool + ) + assert expected == actual + + +def test_parse_deployment_resource_pool_path(): + expected = { + "project": "octopus", + "location": "oyster", + "deployment_resource_pool": "nudibranch", + } + path = DeploymentResourcePoolServiceClient.deployment_resource_pool_path(**expected) + + # Check that the path construction is reversible. + actual = DeploymentResourcePoolServiceClient.parse_deployment_resource_pool_path( + path + ) + assert expected == actual + + +def test_endpoint_path(): + project = "cuttlefish" + location = "mussel" + endpoint = "winkle" + expected = "projects/{project}/locations/{location}/endpoints/{endpoint}".format( + project=project, + location=location, + endpoint=endpoint, + ) + actual = DeploymentResourcePoolServiceClient.endpoint_path( + project, location, endpoint + ) + assert expected == actual + + +def test_parse_endpoint_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "endpoint": "abalone", + } + path = DeploymentResourcePoolServiceClient.endpoint_path(**expected) + + # Check that the path construction is reversible. + actual = DeploymentResourcePoolServiceClient.parse_endpoint_path(path) + assert expected == actual + + +def test_model_path(): + project = "squid" + location = "clam" + model = "whelk" + expected = "projects/{project}/locations/{location}/models/{model}".format( + project=project, + location=location, + model=model, + ) + actual = DeploymentResourcePoolServiceClient.model_path(project, location, model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "project": "octopus", + "location": "oyster", + "model": "nudibranch", + } + path = DeploymentResourcePoolServiceClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = DeploymentResourcePoolServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DeploymentResourcePoolServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = DeploymentResourcePoolServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DeploymentResourcePoolServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DeploymentResourcePoolServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = DeploymentResourcePoolServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DeploymentResourcePoolServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DeploymentResourcePoolServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = DeploymentResourcePoolServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DeploymentResourcePoolServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = DeploymentResourcePoolServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = DeploymentResourcePoolServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DeploymentResourcePoolServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DeploymentResourcePoolServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = DeploymentResourcePoolServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+    actual = DeploymentResourcePoolServiceClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(
+        transports.DeploymentResourcePoolServiceTransport, "_prep_wrapped_messages"
+    ) as prep:
+        client = DeploymentResourcePoolServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(
+        transports.DeploymentResourcePoolServiceTransport, "_prep_wrapped_messages"
+    ) as prep:
+        transport_class = DeploymentResourcePoolServiceClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+
+@pytest.mark.asyncio
+async def test_transport_close_async():
+    client = DeploymentResourcePoolServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    with mock.patch.object(
+        type(getattr(client.transport, "grpc_channel")), "close"
+    ) as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_delete_operation(transport: str = "grpc"):
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.DeleteOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.asyncio
+async def test_delete_operation_async(transport: str = "grpc_asyncio"):
+    client = DeploymentResourcePoolServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.DeleteOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.delete_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_operation_field_headers():
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+ request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.asyncio
+async def test_cancel_operation_async(transport: str = "grpc_asyncio"):
+    client = DeploymentResourcePoolServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.CancelOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.cancel_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_cancel_operation_field_headers():
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.CancelOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        call.return_value = None
+
+        client.cancel_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_cancel_operation_field_headers_async():
+    client = DeploymentResourcePoolServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.CancelOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.cancel_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations",
+    ) in kw["metadata"]
+
+
+def test_cancel_operation_from_dict():
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+
+        response = client.cancel_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_cancel_operation_from_dict_async():
+    client = DeploymentResourcePoolServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.cancel_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_wait_operation(transport: str = "grpc"):
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.WaitOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+        response = client.wait_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+
+
+@pytest.mark.asyncio
+async def test_wait_operation_async(transport: str = "grpc_asyncio"):
+    client = DeploymentResourcePoolServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.WaitOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.wait_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+
+
+def test_wait_operation_field_headers():
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.WaitOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
+        call.return_value = operations_pb2.Operation()
+
+        client.wait_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_wait_operation_field_headers_async():
+    client = DeploymentResourcePoolServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.WaitOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        await client.wait_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations",
+    ) in kw["metadata"]
+
+
+def test_wait_operation_from_dict():
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+
+        response = client.wait_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_wait_operation_from_dict_async():
+    client = DeploymentResourcePoolServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.wait_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_get_operation(transport: str = "grpc"):
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.GetOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+        response = client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+
+
+@pytest.mark.asyncio
+async def test_get_operation_async(transport: str = "grpc_asyncio"):
+    client = DeploymentResourcePoolServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_list_operations(transport: str = "grpc"):
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.ListOperationsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.ListOperationsResponse()
+        response = client.list_operations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.ListOperationsResponse)
+
+
+@pytest.mark.asyncio
+async def test_list_operations_async(transport: str = "grpc_asyncio"):
+    client = DeploymentResourcePoolServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.ListOperationsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.ListOperationsResponse()
+        )
+        response = await client.list_operations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.ListOperationsResponse)
+
+
+def test_list_operations_field_headers():
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.ListOperationsRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        call.return_value = operations_pb2.ListOperationsResponse()
+
+        client.list_operations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_operations_field_headers_async():
+    client = DeploymentResourcePoolServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.ListOperationsRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.ListOperationsResponse()
+        )
+        await client.list_operations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations",
+    ) in kw["metadata"]
+
+
+def test_list_operations_from_dict():
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.ListOperationsResponse()
+
+        response = client.list_operations(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_list_operations_from_dict_async():
+    client = DeploymentResourcePoolServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.ListOperationsResponse()
+        )
+        response = await client.list_operations(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_list_locations(transport: str = "grpc"):
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = locations_pb2.ListLocationsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.ListLocationsResponse()
+        response = client.list_locations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.ListLocationsResponse)
+
+
+@pytest.mark.asyncio
+async def test_list_locations_async(transport: str = "grpc_asyncio"):
+    client = DeploymentResourcePoolServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = locations_pb2.ListLocationsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = DeploymentResourcePoolServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    client = DeploymentResourcePoolServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+        response = client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = DeploymentResourcePoolServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = DeploymentResourcePoolServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = DeploymentResourcePoolServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + DeploymentResourcePoolServiceClient, + transports.DeploymentResourcePoolServiceGrpcTransport, + ), + ( + DeploymentResourcePoolServiceAsyncClient, + transports.DeploymentResourcePoolServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py index 8bd6000c52..e2784df903 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py @@ -3207,10 +3207,38 @@ def test_endpoint_service_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_endpoint_path(): +def test_deployment_resource_pool_path(): project = "squid" location = "clam" - endpoint = "whelk" + deployment_resource_pool = "whelk" + expected = "projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool}".format( + project=project, + location=location, + deployment_resource_pool=deployment_resource_pool, + ) + actual = EndpointServiceClient.deployment_resource_pool_path( + project, location, deployment_resource_pool + ) + assert expected == actual + + +def test_parse_deployment_resource_pool_path(): + expected = { + "project": "octopus", + "location": "oyster", + "deployment_resource_pool": "nudibranch", + } + path = EndpointServiceClient.deployment_resource_pool_path(**expected) + + # Check that the path construction is reversible. 
+ actual = EndpointServiceClient.parse_deployment_resource_pool_path(path) + assert expected == actual + + +def test_endpoint_path(): + project = "cuttlefish" + location = "mussel" + endpoint = "winkle" expected = "projects/{project}/locations/{location}/endpoints/{endpoint}".format( project=project, location=location, @@ -3222,9 +3250,9 @@ def test_endpoint_path(): def test_parse_endpoint_path(): expected = { - "project": "octopus", - "location": "oyster", - "endpoint": "nudibranch", + "project": "nautilus", + "location": "scallop", + "endpoint": "abalone", } path = EndpointServiceClient.endpoint_path(**expected) @@ -3234,9 +3262,9 @@ def test_parse_endpoint_path(): def test_model_path(): - project = "cuttlefish" - location = "mussel" - model = "winkle" + project = "squid" + location = "clam" + model = "whelk" expected = "projects/{project}/locations/{location}/models/{model}".format( project=project, location=location, @@ -3248,9 +3276,9 @@ def test_model_path(): def test_parse_model_path(): expected = { - "project": "nautilus", - "location": "scallop", - "model": "abalone", + "project": "octopus", + "location": "oyster", + "model": "nudibranch", } path = EndpointServiceClient.model_path(**expected) @@ -3260,9 +3288,9 @@ def test_parse_model_path(): def test_model_deployment_monitoring_job_path(): - project = "squid" - location = "clam" - model_deployment_monitoring_job = "whelk" + project = "cuttlefish" + location = "mussel" + model_deployment_monitoring_job = "winkle" expected = "projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}".format( project=project, location=location, @@ -3276,9 +3304,9 @@ def test_model_deployment_monitoring_job_path(): def test_parse_model_deployment_monitoring_job_path(): expected = { - "project": "octopus", - "location": "oyster", - "model_deployment_monitoring_job": "nudibranch", + "project": "nautilus", + "location": "scallop", + "model_deployment_monitoring_job": "abalone", } path = EndpointServiceClient.model_deployment_monitoring_job_path(**expected) @@ -3288,8 +3316,8 @@ def test_parse_model_deployment_monitoring_job_path(): def test_network_path(): - project = "cuttlefish" - network = "mussel" + project = "squid" + network = "clam" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -3300,8 +3328,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "winkle", - "network": "nautilus", + "project": "whelk", + "network": "octopus", } path = EndpointServiceClient.network_path(**expected) @@ -3311,7 +3339,7 @@ def test_parse_network_path(): def test_common_billing_account_path(): - billing_account = "scallop" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -3321,7 +3349,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "abalone", + "billing_account": "nudibranch", } path = EndpointServiceClient.common_billing_account_path(**expected) @@ -3331,7 +3359,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "squid" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -3341,7 +3369,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "clam", + "folder": "mussel", } path = EndpointServiceClient.common_folder_path(**expected) @@ -3351,7 +3379,7 @@ def 
test_parse_common_folder_path(): def test_common_organization_path(): - organization = "whelk" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -3361,7 +3389,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "octopus", + "organization": "nautilus", } path = EndpointServiceClient.common_organization_path(**expected) @@ -3371,7 +3399,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "oyster" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -3381,7 +3409,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nudibranch", + "project": "abalone", } path = EndpointServiceClient.common_project_path(**expected) @@ -3391,8 +3419,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "cuttlefish" - location = "mussel" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -3403,8 +3431,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "winkle", - "location": "nautilus", + "project": "whelk", + "location": "octopus", } path = EndpointServiceClient.common_location_path(**expected) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py index 4c95de5a48..6e2a310b89 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py @@ -2032,19 +2032,22 @@ def test_parse_dataset_path(): def test_dataset_path(): project = "squid" - dataset = "clam" - expected = "projects/{project}/datasets/{dataset}".format( + location = "clam" + dataset = "whelk" + expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( project=project, + location=location, dataset=dataset, ) - actual = MigrationServiceClient.dataset_path(project, dataset) + actual = MigrationServiceClient.dataset_path(project, location, dataset) assert expected == actual def test_parse_dataset_path(): expected = { - "project": "whelk", - "dataset": "octopus", + "project": "octopus", + "location": "oyster", + "dataset": "nudibranch", } path = MigrationServiceClient.dataset_path(**expected) @@ -2054,22 +2057,19 @@ def test_parse_dataset_path(): def test_dataset_path(): - project = "oyster" - location = "nudibranch" - dataset = "cuttlefish" - expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( + project = "cuttlefish" + dataset = "mussel" + expected = "projects/{project}/datasets/{dataset}".format( project=project, - location=location, dataset=dataset, ) - actual = MigrationServiceClient.dataset_path(project, location, dataset) + actual = MigrationServiceClient.dataset_path(project, dataset) assert expected == actual def test_parse_dataset_path(): expected = { - "project": "mussel", - "location": "winkle", + "project": "winkle", "dataset": "nautilus", } path = MigrationServiceClient.dataset_path(**expected)