diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py index 9265fccceca5..dbcff5d80053 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py @@ -14,6 +14,7 @@ # limitations under the License. # from collections import OrderedDict +import logging as std_logging import re from typing import ( Callable, @@ -59,6 +60,15 @@ from .transports.base import DEFAULT_CLIENT_INFO, BatchServiceTransport from .transports.grpc_asyncio import BatchServiceGrpcAsyncIOTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class BatchServiceAsyncClient: """Google Batch Service. @@ -267,6 +277,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.batch_v1.BatchServiceAsyncClient`.", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.batch.v1.BatchService", + "credentialsType": None, + }, + ) + async def create_job( self, request: Optional[Union[batch.CreateJobRequest, dict]] = None, @@ -276,7 +308,7 @@ async def create_job( job_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gcb_job.Job: r"""Create a Job. @@ -340,8 +372,10 @@ async def sample_create_job(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1.types.Job: @@ -404,7 +438,7 @@ async def get_job( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> job.Job: r"""Get a Job specified by its resource name. @@ -445,8 +479,10 @@ async def sample_get_job(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1.types.Job: @@ -503,7 +539,7 @@ async def delete_job( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Delete a Job. @@ -547,8 +583,10 @@ async def sample_delete_job(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -627,7 +665,7 @@ async def list_jobs( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListJobsAsyncPager: r"""List all Jobs for a project within a region. @@ -668,8 +706,10 @@ async def sample_list_jobs(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1.services.batch_service.pagers.ListJobsAsyncPager: @@ -744,7 +784,7 @@ async def get_task( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> task.Task: r"""Return a single Task. @@ -785,8 +825,10 @@ async def sample_get_task(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
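For context on the widened metadata annotation above, a minimal sketch (not part of this diff, shown with the synchronous client for brevity) of passing request metadata where a plain key carries a str value and a key with the `-bin` suffix carries bytes; the project, location, job, and header names are illustrative:

from google.cloud import batch_v1

client = batch_v1.BatchServiceClient()
job = client.get_job(
    name="projects/example-project/locations/us-central1/jobs/example-job",
    metadata=[
        # Ordinary metadata keys take str values.
        ("x-goog-request-reason", "debugging"),
        # Keys ending in "-bin" take bytes values, matching the new annotation.
        ("x-example-trace-bin", b"\x0a\x02\x08\x01"),
    ],
)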
Returns: google.cloud.batch_v1.types.Task: @@ -843,7 +885,7 @@ async def list_tasks( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTasksAsyncPager: r"""List Tasks associated with a job. @@ -888,8 +930,10 @@ async def sample_list_tasks(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1.services.batch_service.pagers.ListTasksAsyncPager: @@ -963,7 +1007,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -974,8 +1018,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1016,7 +1062,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1027,8 +1073,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1069,7 +1117,7 @@ async def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. 
@@ -1085,8 +1133,10 @@ async def delete_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1123,7 +1173,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1138,8 +1188,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1176,7 +1228,7 @@ async def get_location( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -1187,8 +1239,10 @@ async def get_location( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.Location: Location object. @@ -1229,7 +1283,7 @@ async def list_locations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -1240,8 +1294,10 @@ async def list_locations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py index 22a7a7de0cb6..ebc654ce031a 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py @@ -14,6 +14,7 @@ # limitations under the License. # from collections import OrderedDict +import logging as std_logging import os import re from typing import ( @@ -48,6 +49,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -639,6 +649,10 @@ def __init__( # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -701,6 +715,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.batch_v1.BatchServiceClient`.", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.batch.v1.BatchService", + "credentialsType": None, + }, + ) + def create_job( self, request: Optional[Union[batch.CreateJobRequest, dict]] = None, @@ -710,7 +747,7 @@ def create_job( job_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gcb_job.Job: r"""Create a Job. @@ -774,8 +811,10 @@ def sample_create_job(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
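A short sketch of how the DEBUG records added in this change can be surfaced. Standard library logging is enough, because the new log statements are gated on the module logger being enabled for DEBUG; the environment variable mentioned in the final comment is an assumption about the google.api_core.client_logging mechanism that initialize_logging() above invokes:

import logging

# Attach a root handler and enable DEBUG only for the Batch client modules;
# records from google.cloud.batch_v1.* then propagate to the root handler.
logging.basicConfig()
logging.getLogger("google.cloud.batch_v1").setLevel(logging.DEBUG)

# Alternatively (assumption: supported by the installed google-api-core),
# exporting GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.batch_v1 before the
# client is constructed lets client_logging.initialize_logging() attach
# handlers for that scope.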
Returns: google.cloud.batch_v1.types.Job: @@ -835,7 +874,7 @@ def get_job( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> job.Job: r"""Get a Job specified by its resource name. @@ -876,8 +915,10 @@ def sample_get_job(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1.types.Job: @@ -933,7 +974,7 @@ def delete_job( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Delete a Job. @@ -977,8 +1018,10 @@ def sample_delete_job(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1054,7 +1097,7 @@ def list_jobs( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListJobsPager: r"""List all Jobs for a project within a region. @@ -1095,8 +1138,10 @@ def sample_list_jobs(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1.services.batch_service.pagers.ListJobsPager: @@ -1168,7 +1213,7 @@ def get_task( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> task.Task: r"""Return a single Task. @@ -1209,8 +1254,10 @@ def sample_get_task(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1.types.Task: @@ -1266,7 +1313,7 @@ def list_tasks( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTasksPager: r"""List Tasks associated with a job. @@ -1311,8 +1358,10 @@ def sample_list_tasks(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1.services.batch_service.pagers.ListTasksPager: @@ -1396,7 +1445,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1407,8 +1456,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1449,7 +1500,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1460,8 +1511,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. 
@@ -1502,7 +1555,7 @@ def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1518,8 +1571,10 @@ def delete_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1556,7 +1611,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1571,8 +1626,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1609,7 +1666,7 @@ def get_location( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -1620,8 +1677,10 @@ def get_location( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.Location: Location object. @@ -1662,7 +1721,7 @@ def list_locations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -1673,8 +1732,10 @@ def list_locations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/pagers.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/pagers.py index a15be60cf686..4f4dc5225bba 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/pagers.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/pagers.py @@ -67,7 +67,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -81,8 +81,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = batch.ListJobsRequest(request) @@ -141,7 +143,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -155,8 +157,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = batch.ListJobsRequest(request) @@ -219,7 +223,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -233,8 +237,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = batch.ListTasksRequest(request) @@ -293,7 +299,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -307,8 +313,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = batch.ListTasksRequest(request) diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc.py index 33a443a9b3c3..394aaf603b5d 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import json +import logging as std_logging +import pickle from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings @@ -22,7 +25,10 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.batch_v1.types import batch from google.cloud.batch_v1.types import job @@ -31,6 +37,81 @@ from .base import DEFAULT_CLIENT_INFO, BatchServiceTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": client_call_details.method, + "request": grpc_request, + 
"metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class BatchServiceGrpcTransport(BatchServiceTransport): """gRPC backend transport for BatchService. @@ -187,7 +268,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -251,7 +337,9 @@ def operations_client(self) -> operations_v1.OperationsClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) # Return the client from cache. return self._operations_client @@ -273,7 +361,7 @@ def create_job(self) -> Callable[[batch.CreateJobRequest], gcb_job.Job]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_job" not in self._stubs: - self._stubs["create_job"] = self.grpc_channel.unary_unary( + self._stubs["create_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1.BatchService/CreateJob", request_serializer=batch.CreateJobRequest.serialize, response_deserializer=gcb_job.Job.deserialize, @@ -297,7 +385,7 @@ def get_job(self) -> Callable[[batch.GetJobRequest], job.Job]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_job" not in self._stubs: - self._stubs["get_job"] = self.grpc_channel.unary_unary( + self._stubs["get_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1.BatchService/GetJob", request_serializer=batch.GetJobRequest.serialize, response_deserializer=job.Job.deserialize, @@ -323,7 +411,7 @@ def delete_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_job" not in self._stubs: - self._stubs["delete_job"] = self.grpc_channel.unary_unary( + self._stubs["delete_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1.BatchService/DeleteJob", request_serializer=batch.DeleteJobRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -347,7 +435,7 @@ def list_jobs(self) -> Callable[[batch.ListJobsRequest], batch.ListJobsResponse] # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_jobs" not in self._stubs: - self._stubs["list_jobs"] = self.grpc_channel.unary_unary( + self._stubs["list_jobs"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1.BatchService/ListJobs", request_serializer=batch.ListJobsRequest.serialize, response_deserializer=batch.ListJobsResponse.deserialize, @@ -371,7 +459,7 @@ def get_task(self) -> Callable[[batch.GetTaskRequest], task.Task]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_task" not in self._stubs: - self._stubs["get_task"] = self.grpc_channel.unary_unary( + self._stubs["get_task"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1.BatchService/GetTask", request_serializer=batch.GetTaskRequest.serialize, response_deserializer=task.Task.deserialize, @@ -395,7 +483,7 @@ def list_tasks(self) -> Callable[[batch.ListTasksRequest], batch.ListTasksRespon # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_tasks" not in self._stubs: - self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + self._stubs["list_tasks"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1.BatchService/ListTasks", request_serializer=batch.ListTasksRequest.serialize, response_deserializer=batch.ListTasksResponse.deserialize, @@ -403,7 +491,7 @@ def list_tasks(self) -> Callable[[batch.ListTasksRequest], batch.ListTasksRespon return self._stubs["list_tasks"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def delete_operation( @@ -415,7 +503,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -432,7 +520,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -449,7 +537,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -468,7 +556,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, @@ -487,7 +575,7 @@ def list_locations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( + self._stubs["list_locations"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/ListLocations", request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, response_deserializer=locations_pb2.ListLocationsResponse.FromString, @@ -504,7 +592,7 @@ def get_location( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( + self._stubs["get_location"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/GetLocation", request_serializer=locations_pb2.GetLocationRequest.SerializeToString, response_deserializer=locations_pb2.Location.FromString, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc_asyncio.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc_asyncio.py index 2f2ba85a1631..e684e74c794e 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
# import inspect +import json +import logging as std_logging +import pickle from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings @@ -24,8 +27,11 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore from grpc.experimental import aio # type: ignore +import proto # type: ignore from google.cloud.batch_v1.types import batch from google.cloud.batch_v1.types import job @@ -35,6 +41,82 @@ from .base import DEFAULT_CLIENT_INFO, BatchServiceTransport from .grpc import BatchServiceGrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class BatchServiceGrpcAsyncIOTransport(BatchServiceTransport): """gRPC AsyncIO backend transport for BatchService. @@ -234,10 +316,13 @@ def __init__( ], ) - # Wrap messages. 
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = ( "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -260,7 +345,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -283,7 +368,7 @@ def create_job(self) -> Callable[[batch.CreateJobRequest], Awaitable[gcb_job.Job # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_job" not in self._stubs: - self._stubs["create_job"] = self.grpc_channel.unary_unary( + self._stubs["create_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1.BatchService/CreateJob", request_serializer=batch.CreateJobRequest.serialize, response_deserializer=gcb_job.Job.deserialize, @@ -307,7 +392,7 @@ def get_job(self) -> Callable[[batch.GetJobRequest], Awaitable[job.Job]]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_job" not in self._stubs: - self._stubs["get_job"] = self.grpc_channel.unary_unary( + self._stubs["get_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1.BatchService/GetJob", request_serializer=batch.GetJobRequest.serialize, response_deserializer=job.Job.deserialize, @@ -333,7 +418,7 @@ def delete_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_job" not in self._stubs: - self._stubs["delete_job"] = self.grpc_channel.unary_unary( + self._stubs["delete_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1.BatchService/DeleteJob", request_serializer=batch.DeleteJobRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -359,7 +444,7 @@ def list_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_jobs" not in self._stubs: - self._stubs["list_jobs"] = self.grpc_channel.unary_unary( + self._stubs["list_jobs"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1.BatchService/ListJobs", request_serializer=batch.ListJobsRequest.serialize, response_deserializer=batch.ListJobsResponse.deserialize, @@ -383,7 +468,7 @@ def get_task(self) -> Callable[[batch.GetTaskRequest], Awaitable[task.Task]]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_task" not in self._stubs: - self._stubs["get_task"] = self.grpc_channel.unary_unary( + self._stubs["get_task"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1.BatchService/GetTask", request_serializer=batch.GetTaskRequest.serialize, response_deserializer=task.Task.deserialize, @@ -409,7 +494,7 @@ def list_tasks( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_tasks" not in self._stubs: - self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + self._stubs["list_tasks"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1.BatchService/ListTasks", request_serializer=batch.ListTasksRequest.serialize, response_deserializer=batch.ListTasksResponse.deserialize, @@ -523,7 +608,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -539,7 +624,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -556,7 +641,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -573,7 +658,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -592,7 +677,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, @@ -611,7 +696,7 @@ def list_locations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( + self._stubs["list_locations"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/ListLocations", request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, response_deserializer=locations_pb2.ListLocationsResponse.FromString, @@ -628,7 +713,7 @@ def get_location( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( + self._stubs["get_location"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/GetLocation", request_serializer=locations_pb2.GetLocationRequest.SerializeToString, response_deserializer=locations_pb2.Location.FromString, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/rest.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/rest.py index ca9bf1f0299b..28e9964eee32 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/rest.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/rest.py @@ -13,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import dataclasses import json # type: ignore +import logging from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings @@ -42,6 +42,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -120,8 +128,10 @@ def post_list_tasks(self, response): """ def pre_create_job( - self, request: batch.CreateJobRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[batch.CreateJobRequest, Sequence[Tuple[str, str]]]: + self, + request: batch.CreateJobRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.CreateJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_job Override in a subclass to manipulate the request or metadata @@ -139,8 +149,10 @@ def post_create_job(self, response: gcb_job.Job) -> gcb_job.Job: return response def pre_delete_job( - self, request: batch.DeleteJobRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[batch.DeleteJobRequest, Sequence[Tuple[str, str]]]: + self, + request: batch.DeleteJobRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.DeleteJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_job Override in a subclass to manipulate the request or metadata @@ -160,8 +172,10 @@ def post_delete_job( return response def pre_get_job( - self, request: batch.GetJobRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[batch.GetJobRequest, Sequence[Tuple[str, str]]]: + self, + request: batch.GetJobRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.GetJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_job Override in a subclass to manipulate the request or metadata @@ -179,8 +193,10 @@ def post_get_job(self, response: job.Job) -> job.Job: return response def pre_get_task( - self, request: batch.GetTaskRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[batch.GetTaskRequest, Sequence[Tuple[str, str]]]: + self, + request: batch.GetTaskRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.GetTaskRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_task Override in a subclass to manipulate the request or 
metadata @@ -198,8 +214,10 @@ def post_get_task(self, response: task.Task) -> task.Task: return response def pre_list_jobs( - self, request: batch.ListJobsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[batch.ListJobsRequest, Sequence[Tuple[str, str]]]: + self, + request: batch.ListJobsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.ListJobsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_jobs Override in a subclass to manipulate the request or metadata @@ -219,8 +237,10 @@ def post_list_jobs( return response def pre_list_tasks( - self, request: batch.ListTasksRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[batch.ListTasksRequest, Sequence[Tuple[str, str]]]: + self, + request: batch.ListTasksRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.ListTasksRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_tasks Override in a subclass to manipulate the request or metadata @@ -242,8 +262,10 @@ def post_list_tasks( def pre_get_location( self, request: locations_pb2.GetLocationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -265,8 +287,10 @@ def post_get_location( def pre_list_locations( self, request: locations_pb2.ListLocationsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ -288,8 +312,10 @@ def post_list_locations( def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -309,8 +335,10 @@ def post_cancel_operation(self, response: None) -> None: def pre_delete_operation( self, request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -330,8 +358,10 @@ def post_delete_operation(self, response: None) -> None: def pre_get_operation( self, request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ 
-353,8 +383,10 @@ def post_get_operation( def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -551,7 +583,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gcb_job.Job: r"""Call the create job method over HTTP. @@ -561,8 +593,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.gcb_job.Job: @@ -572,6 +606,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseCreateJob._get_http_options() ) + request, metadata = self._interceptor.pre_create_job(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseCreateJob._get_transcoded_request( @@ -590,6 +625,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1.BatchServiceClient.CreateJob", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "CreateJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._CreateJob._get_response( self._host, @@ -611,7 +673,29 @@ def __call__( pb_resp = gcb_job.Job.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_job(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gcb_job.Job.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1.BatchServiceClient.create_job", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "CreateJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteJob( @@ -648,7 +732,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> 
operations_pb2.Operation: r"""Call the delete job method over HTTP. @@ -658,8 +742,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -672,6 +758,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseDeleteJob._get_http_options() ) + request, metadata = self._interceptor.pre_delete_job(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseDeleteJob._get_transcoded_request( @@ -686,6 +773,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1.BatchServiceClient.DeleteJob", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "DeleteJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._DeleteJob._get_response( self._host, @@ -704,7 +818,29 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_job(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1.BatchServiceClient.delete_job", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "DeleteJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetJob(_BaseBatchServiceRestTransport._BaseGetJob, BatchServiceRestStub): @@ -739,7 +875,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> job.Job: r"""Call the get job method over HTTP. @@ -749,8 +885,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.job.Job: @@ -760,6 +898,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseGetJob._get_http_options() ) + request, metadata = self._interceptor.pre_get_job(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseGetJob._get_transcoded_request( @@ -774,6 +913,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1.BatchServiceClient.GetJob", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "GetJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._GetJob._get_response( self._host, @@ -794,7 +960,29 @@ def __call__( pb_resp = job.Job.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = job.Job.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1.BatchServiceClient.get_job", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "GetJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetTask(_BaseBatchServiceRestTransport._BaseGetTask, BatchServiceRestStub): @@ -829,7 +1017,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> task.Task: r"""Call the get task method over HTTP. @@ -839,8 +1027,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.task.Task: @@ -850,6 +1040,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseGetTask._get_http_options() ) + request, metadata = self._interceptor.pre_get_task(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseGetTask._get_transcoded_request( @@ -864,6 +1055,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1.BatchServiceClient.GetTask", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "GetTask", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._GetTask._get_response( self._host, @@ -884,7 +1102,29 @@ def __call__( pb_resp = task.Task.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_task(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = task.Task.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1.BatchServiceClient.get_task", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "GetTask", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListJobs(_BaseBatchServiceRestTransport._BaseListJobs, BatchServiceRestStub): @@ -919,7 +1159,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> batch.ListJobsResponse: r"""Call the list jobs method over HTTP. @@ -929,8 +1169,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.batch.ListJobsResponse: @@ -940,6 +1182,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseListJobs._get_http_options() ) + request, metadata = self._interceptor.pre_list_jobs(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseListJobs._get_transcoded_request( @@ -954,6 +1197,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1.BatchServiceClient.ListJobs", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "ListJobs", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._ListJobs._get_response( self._host, @@ -974,7 +1244,29 @@ def __call__( pb_resp = batch.ListJobsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_jobs(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = batch.ListJobsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1.BatchServiceClient.list_jobs", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "ListJobs", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListTasks( @@ -1011,7 +1303,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> batch.ListTasksResponse: r"""Call the list tasks method over HTTP. @@ -1021,8 +1313,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.batch.ListTasksResponse: @@ -1032,6 +1326,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseListTasks._get_http_options() ) + request, metadata = self._interceptor.pre_list_tasks(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseListTasks._get_transcoded_request( @@ -1046,6 +1341,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1.BatchServiceClient.ListTasks", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "ListTasks", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._ListTasks._get_response( self._host, @@ -1066,7 +1388,29 @@ def __call__( pb_resp = batch.ListTasksResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_tasks(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = batch.ListTasksResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1.BatchServiceClient.list_tasks", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "ListTasks", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -1145,7 +1489,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: r"""Call the get location method over HTTP. @@ -1155,8 +1499,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: locations_pb2.Location: Response from GetLocation method. 
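Editor's note: every REST `__call__` above guards its new request/response records behind `CLIENT_LOGGING_SUPPORTED` and a DEBUG-level check on the module logger, so nothing is emitted unless an application opts in. A minimal sketch of switching the logging on with the standard library (logger names follow the package layout shown in this diff; the project, location, and job names are placeholders):

```python
import logging

from google.cloud import batch_v1

# The new log statements fire only when google.api_core.client_logging is
# importable (CLIENT_LOGGING_SUPPORTED) and the module logger has DEBUG
# enabled. Enabling DEBUG on the parent package logger covers the transport
# modules (e.g. ...services.batch_service.transports.rest) via the normal
# logger hierarchy.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(name)s %(levelname)s %(message)s"))

batch_logger = logging.getLogger("google.cloud.batch_v1")
batch_logger.setLevel(logging.DEBUG)
batch_logger.addHandler(handler)

client = batch_v1.BatchServiceClient(transport="rest")

# Emits a "Sending request for ...GetJob" record before the HTTP call and a
# matching "Received response ..." record afterwards, each carrying the
# structured extras built in the blocks above.
job = client.get_job(name="projects/my-project/locations/us-central1/jobs/my-job")
print(job.name)
```

Newer google-api-core releases can also bootstrap this automatically through `client_logging.initialize_logging()` (visible in the sync client `__init__` later in this diff); the manual setup above does not rely on that behavior.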
@@ -1165,6 +1511,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseGetLocation._get_http_options() ) + request, metadata = self._interceptor.pre_get_location(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseGetLocation._get_transcoded_request( @@ -1179,6 +1526,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1.BatchServiceClient.GetLocation", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._GetLocation._get_response( self._host, @@ -1198,6 +1572,27 @@ def __call__( resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1.BatchServiceAsyncClient.GetLocation", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -1238,7 +1633,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. @@ -1248,8 +1643,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: locations_pb2.ListLocationsResponse: Response from ListLocations method. 
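Editor's note: because the `extra` dictionaries passed to `_LOGGER.debug` become attributes on the emitted `LogRecord`, downstream code can recover the structured fields directly from the record. A purely illustrative handler, with field names taken from the `extra` dicts in this diff:

```python
import json
import logging


class StructuredBatchLogHandler(logging.Handler):
    """Re-emit the structured fields the transport attaches via `extra`."""

    def emit(self, record: logging.LogRecord) -> None:
        entry = {
            "message": record.getMessage(),
            # These attributes exist only on records produced by the
            # request/response logging blocks added in this change.
            "serviceName": getattr(record, "serviceName", None),
            "rpcName": getattr(record, "rpcName", None),
            "httpRequest": getattr(record, "httpRequest", None),
            "httpResponse": getattr(record, "httpResponse", None),
        }
        print(json.dumps(entry, default=str))


logger = logging.getLogger("google.cloud.batch_v1")
logger.setLevel(logging.DEBUG)
logger.addHandler(StructuredBatchLogHandler())
```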
@@ -1258,6 +1655,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseListLocations._get_http_options() ) + request, metadata = self._interceptor.pre_list_locations(request, metadata) transcoded_request = _BaseBatchServiceRestTransport._BaseListLocations._get_transcoded_request( http_options, request @@ -1268,6 +1666,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1.BatchServiceClient.ListLocations", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._ListLocations._get_response( self._host, @@ -1287,6 +1712,27 @@ def __call__( resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1.BatchServiceAsyncClient.ListLocations", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -1328,7 +1774,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Call the cancel operation method over HTTP. @@ -1338,13 +1784,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" http_options = ( _BaseBatchServiceRestTransport._BaseCancelOperation._get_http_options() ) + request, metadata = self._interceptor.pre_cancel_operation( request, metadata ) @@ -1361,6 +1810,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1.BatchServiceClient.CancelOperation", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._CancelOperation._get_response( self._host, @@ -1417,7 +1893,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Call the delete operation method over HTTP. @@ -1427,13 +1903,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = ( _BaseBatchServiceRestTransport._BaseDeleteOperation._get_http_options() ) + request, metadata = self._interceptor.pre_delete_operation( request, metadata ) @@ -1446,6 +1925,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1.BatchServiceClient.DeleteOperation", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._DeleteOperation._get_response( self._host, @@ -1501,7 +2007,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. @@ -1511,8 +2017,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from GetOperation method. @@ -1521,6 +2029,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseGetOperation._get_http_options() ) + request, metadata = self._interceptor.pre_get_operation(request, metadata) transcoded_request = _BaseBatchServiceRestTransport._BaseGetOperation._get_transcoded_request( http_options, request @@ -1533,6 +2042,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1.BatchServiceClient.GetOperation", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._GetOperation._get_response( self._host, @@ -1552,6 +2088,27 @@ def __call__( resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1.BatchServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -1592,7 +2149,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. @@ -1602,8 +2159,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.ListOperationsResponse: Response from ListOperations method. 
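Editor's note: the interceptor hooks whose signatures were widened earlier in this file are the intended place to inspect or extend per-request metadata now that values may be `bytes`. A sketch of a subclass wired into the REST transport; the header names and values are made up, and binary values are only legal for keys ending in `-bin`:

```python
from typing import Sequence, Tuple, Union

from google.cloud import batch_v1
from google.cloud.batch_v1.services.batch_service.transports.rest import (
    BatchServiceRestInterceptor,
    BatchServiceRestTransport,
)


class MetadataInjectingInterceptor(BatchServiceRestInterceptor):
    """Appends extra request metadata before create_job is sent."""

    def pre_create_job(
        self,
        request: batch_v1.CreateJobRequest,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[batch_v1.CreateJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
        extra = [
            ("x-example-caller", "editor-sketch"),     # plain string value
            ("x-example-trace-bin", b"\x0a\x0b\x0c"),  # bytes value for a "-bin" key
        ]
        return request, list(metadata) + extra


# Uses Application Default Credentials unless explicit credentials are passed.
transport = BatchServiceRestTransport(interceptor=MetadataInjectingInterceptor())
client = batch_v1.BatchServiceClient(transport=transport)
```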
@@ -1612,6 +2171,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseListOperations._get_http_options() ) + request, metadata = self._interceptor.pre_list_operations(request, metadata) transcoded_request = _BaseBatchServiceRestTransport._BaseListOperations._get_transcoded_request( http_options, request @@ -1622,6 +2182,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1.BatchServiceClient.ListOperations", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._ListOperations._get_response( self._host, @@ -1641,6 +2228,27 @@ def __call__( resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1.BatchServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py index 0fd45dd52761..64df7c9ddcc1 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py @@ -141,22 +141,29 @@ class Job(proto.Message): class LogsPolicy(proto.Message): - r"""LogsPolicy describes how outputs from a Job's Tasks - (stdout/stderr) will be preserved. + r"""LogsPolicy describes if and how a job's logs are preserved. Logs + include information that is automatically written by the Batch + service agent and any information that you configured the job's + runnables to write to the ``stdout`` or ``stderr`` streams. Attributes: destination (google.cloud.batch_v1.types.LogsPolicy.Destination): - Where logs should be saved. + If and where logs should be saved. logs_path (str): - The path to which logs are saved when the - destination = PATH. This can be a local file - path on the VM, or under the mount point of a - Persistent Disk or Filestore, or a Cloud Storage - path. + When ``destination`` is set to ``PATH``, you must set this + field to the path where you want logs to be saved. This path + can point to a local directory on the VM or (if congifured) + a directory under the mount path of any Cloud Storage + bucket, network file system (NFS), or writable persistent + disk that is mounted to the job. 
For example, if the job has + a bucket with ``mountPath`` set to ``/mnt/disks/my-bucket``, + you can write logs to the root directory of the + ``remotePath`` of that bucket by setting this field to + ``/mnt/disks/my-bucket/``. cloud_logging_option (google.cloud.batch_v1.types.LogsPolicy.CloudLoggingOption): - Optional. Additional settings for Cloud Logging. It will - only take effect when the destination of ``LogsPolicy`` is - set to ``CLOUD_LOGGING``. + Optional. When ``destination`` is set to ``CLOUD_LOGGING``, + you can optionally set this field to configure additional + settings for Cloud Logging. """ class Destination(proto.Enum): @@ -164,11 +171,14 @@ class Destination(proto.Enum): Values: DESTINATION_UNSPECIFIED (0): - Logs are not preserved. + (Default) Logs are not preserved. CLOUD_LOGGING (1): - Logs are streamed to Cloud Logging. + Logs are streamed to Cloud Logging. Optionally, you can + configure additional settings in the ``cloudLoggingOption`` + field. PATH (2): - Logs are saved to a file path. + Logs are saved to the file path specified in the + ``logsPath`` field. """ DESTINATION_UNSPECIFIED = 0 CLOUD_LOGGING = 1 @@ -180,8 +190,8 @@ class CloudLoggingOption(proto.Message): Attributes: use_generic_task_monitored_resource (bool): - Optional. Set this flag to true to change the `monitored - resource + Optional. Set this field to ``true`` to change the + `monitored resource type `__ for Cloud Logging logs generated by this Batch job from the ```batch.googleapis.com/Job`` `__ @@ -739,10 +749,10 @@ class InstancePolicy(proto.Message): file system or a raw storage drive that is not ready for data storage and accessing. reservation (str): - Optional. If specified, VMs will consume only the specified - reservation. If not specified (default), VMs will consume - any applicable reservation. Additionally, VMs will not - consume any reservation if "NO_RESERVATION" is specified. + Optional. If not specified (default), VMs will consume any + applicable reservation. If "NO_RESERVATION" is specified, + VMs will not consume any reservation. Otherwise, if + specified, VMs will consume only the specified reservation. """ machine_type: str = proto.Field( @@ -804,9 +814,9 @@ class InstancePolicyOrTemplate(proto.Message): field as 'instance_template' instead of 'template' to avoid C++ keyword conflict. - Batch only supports global instance templates. You can - specify the global instance template as a full or partial - URL. + Batch only supports global instance templates from the same + project as the job. You can specify the global instance + template as a full or partial URL. This field is a member of `oneof`_ ``policy_template``. install_gpu_drivers (bool): diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py index 72842b65808a..5be36827accf 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py @@ -14,6 +14,7 @@ # limitations under the License. 
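Editor's note: stepping back to the `job.py` documentation changes above (the rewritten `LogsPolicy` fields, the `reservation` clarification, and the instance-template note), a sketch of how those fields fit together when constructing a job. All names, paths, and machine types below are placeholders:

```python
from google.cloud import batch_v1

job = batch_v1.Job(
    task_groups=[
        batch_v1.TaskGroup(
            task_spec=batch_v1.TaskSpec(
                runnables=[
                    batch_v1.Runnable(
                        script=batch_v1.Runnable.Script(text="echo hello from Batch")
                    )
                ]
            )
        )
    ],
    allocation_policy=batch_v1.AllocationPolicy(
        instances=[
            batch_v1.AllocationPolicy.InstancePolicyOrTemplate(
                policy=batch_v1.AllocationPolicy.InstancePolicy(
                    machine_type="e2-standard-4",
                    # Per the updated docs: "NO_RESERVATION" opts these VMs out of
                    # consuming any reservation; omitting the field (the default)
                    # lets them consume any applicable reservation.
                    reservation="NO_RESERVATION",
                )
            )
        ]
    ),
    logs_policy=batch_v1.LogsPolicy(
        # PATH saves logs to logs_path; CLOUD_LOGGING streams them to Cloud
        # Logging and can be tuned further via cloud_logging_option.
        destination=batch_v1.LogsPolicy.Destination.PATH,
        logs_path="/mnt/disks/my-bucket/",  # hypothetical mountPath of an attached bucket
    ),
)

client = batch_v1.BatchServiceClient()
created = client.create_job(
    parent="projects/my-project/locations/us-central1",
    job=job,
    job_id="example-logs-policy-job",
)
```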
# from collections import OrderedDict +import logging as std_logging import re from typing import ( Callable, @@ -65,6 +66,15 @@ from .transports.base import DEFAULT_CLIENT_INFO, BatchServiceTransport from .transports.grpc_asyncio import BatchServiceGrpcAsyncIOTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class BatchServiceAsyncClient: """Google Batch Service. @@ -277,6 +287,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.batch_v1alpha.BatchServiceAsyncClient`.", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "credentialsType": None, + }, + ) + async def create_job( self, request: Optional[Union[batch.CreateJobRequest, dict]] = None, @@ -286,7 +318,7 @@ async def create_job( job_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gcb_job.Job: r"""Create a Job. @@ -350,8 +382,10 @@ async def sample_create_job(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1alpha.types.Job: @@ -414,7 +448,7 @@ async def get_job( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> job.Job: r"""Get a Job specified by its resource name. @@ -455,8 +489,10 @@ async def sample_get_job(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.batch_v1alpha.types.Job: @@ -513,7 +549,7 @@ async def delete_job( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Delete a Job. @@ -557,8 +593,10 @@ async def sample_delete_job(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -637,7 +675,7 @@ async def cancel_job( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Cancel a Job. @@ -682,8 +720,10 @@ async def sample_cancel_job(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -756,7 +796,7 @@ async def update_job( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gcb_job.Job: r"""Update a Job. @@ -824,8 +864,10 @@ async def sample_update_job(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1alpha.types.Job: @@ -886,7 +928,7 @@ async def list_jobs( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListJobsAsyncPager: r"""List all Jobs for a project within a region. @@ -927,8 +969,10 @@ async def sample_list_jobs(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1alpha.services.batch_service.pagers.ListJobsAsyncPager: @@ -1003,7 +1047,7 @@ async def get_task( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> task.Task: r"""Return a single Task. @@ -1044,8 +1088,10 @@ async def sample_get_task(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1alpha.types.Task: @@ -1102,7 +1148,7 @@ async def list_tasks( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTasksAsyncPager: r"""List Tasks associated with a job. @@ -1147,8 +1193,10 @@ async def sample_list_tasks(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1alpha.services.batch_service.pagers.ListTasksAsyncPager: @@ -1225,7 +1273,7 @@ async def create_resource_allowance( resource_allowance_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gcb_resource_allowance.ResourceAllowance: r"""Create a Resource Allowance. @@ -1299,8 +1347,10 @@ async def sample_create_resource_allowance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.batch_v1alpha.types.ResourceAllowance: @@ -1367,7 +1417,7 @@ async def get_resource_allowance( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource_allowance.ResourceAllowance: r"""Get a ResourceAllowance specified by its resource name. @@ -1409,8 +1459,10 @@ async def sample_get_resource_allowance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1alpha.types.ResourceAllowance: @@ -1473,7 +1525,7 @@ async def delete_resource_allowance( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Delete a ResourceAllowance. @@ -1518,8 +1570,10 @@ async def sample_delete_resource_allowance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1598,7 +1652,7 @@ async def list_resource_allowances( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListResourceAllowancesAsyncPager: r"""List all ResourceAllowances for a project within a region. @@ -1641,8 +1695,10 @@ async def sample_list_resource_allowances(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.batch_v1alpha.services.batch_service.pagers.ListResourceAllowancesAsyncPager: @@ -1718,7 +1774,7 @@ async def update_resource_allowance( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gcb_resource_allowance.ResourceAllowance: r"""Update a Resource Allowance. @@ -1783,8 +1839,10 @@ async def sample_update_resource_allowance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1alpha.types.ResourceAllowance: @@ -1850,7 +1908,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1861,8 +1919,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1903,7 +1963,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1914,8 +1974,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1956,7 +2018,7 @@ async def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. 
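Editor's note: the v1alpha surface above adds the ResourceAllowance RPCs alongside the widened metadata typing. A short async sketch of exercising the list and delete paths under those assumptions; the project, location, and allowance names are placeholders:

```python
import asyncio

from google.cloud import batch_v1alpha


async def main() -> None:
    client = batch_v1alpha.BatchServiceAsyncClient()
    parent = "projects/my-project/locations/us-central1"

    # list_resource_allowances resolves to an async pager once awaited.
    pager = await client.list_resource_allowances(parent=parent)
    async for allowance in pager:
        print(allowance.name)

    # delete_resource_allowance returns a long-running operation wrapper;
    # result() resolves once the server-side deletion finishes.
    operation = await client.delete_resource_allowance(
        name=f"{parent}/resourceAllowances/my-allowance"
    )
    await operation.result()


asyncio.run(main())
```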
@@ -1972,8 +2034,10 @@ async def delete_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2010,7 +2074,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -2025,8 +2089,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2063,7 +2129,7 @@ async def get_location( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -2074,8 +2140,10 @@ async def get_location( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.Location: Location object. @@ -2116,7 +2184,7 @@ async def list_locations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -2127,8 +2195,10 @@ async def list_locations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py index b8db242b68b7..00e0a1b408c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py @@ -14,6 +14,7 @@ # limitations under the License. # from collections import OrderedDict +import logging as std_logging import os import re from typing import ( @@ -48,6 +49,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -667,6 +677,10 @@ def __init__( # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -729,6 +743,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.batch_v1alpha.BatchServiceClient`.", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "credentialsType": None, + }, + ) + def create_job( self, request: Optional[Union[batch.CreateJobRequest, dict]] = None, @@ -738,7 +775,7 @@ def create_job( job_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gcb_job.Job: r"""Create a Job. @@ -802,8 +839,10 @@ def sample_create_job(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.batch_v1alpha.types.Job: @@ -863,7 +902,7 @@ def get_job( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> job.Job: r"""Get a Job specified by its resource name. @@ -904,8 +943,10 @@ def sample_get_job(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1alpha.types.Job: @@ -961,7 +1002,7 @@ def delete_job( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Delete a Job. @@ -1005,8 +1046,10 @@ def sample_delete_job(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1082,7 +1125,7 @@ def cancel_job( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Cancel a Job. @@ -1127,8 +1170,10 @@ def sample_cancel_job(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1198,7 +1243,7 @@ def update_job( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gcb_job.Job: r"""Update a Job. @@ -1266,8 +1311,10 @@ def sample_update_job(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1alpha.types.Job: @@ -1325,7 +1372,7 @@ def list_jobs( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListJobsPager: r"""List all Jobs for a project within a region. @@ -1366,8 +1413,10 @@ def sample_list_jobs(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1alpha.services.batch_service.pagers.ListJobsPager: @@ -1439,7 +1488,7 @@ def get_task( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> task.Task: r"""Return a single Task. @@ -1480,8 +1529,10 @@ def sample_get_task(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1alpha.types.Task: @@ -1537,7 +1588,7 @@ def list_tasks( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTasksPager: r"""List Tasks associated with a job. @@ -1582,8 +1633,10 @@ def sample_list_tasks(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.batch_v1alpha.services.batch_service.pagers.ListTasksPager: @@ -1657,7 +1710,7 @@ def create_resource_allowance( resource_allowance_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gcb_resource_allowance.ResourceAllowance: r"""Create a Resource Allowance. @@ -1731,8 +1784,10 @@ def sample_create_resource_allowance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1alpha.types.ResourceAllowance: @@ -1798,7 +1853,7 @@ def get_resource_allowance( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource_allowance.ResourceAllowance: r"""Get a ResourceAllowance specified by its resource name. @@ -1840,8 +1895,10 @@ def sample_get_resource_allowance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1alpha.types.ResourceAllowance: @@ -1901,7 +1958,7 @@ def delete_resource_allowance( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Delete a ResourceAllowance. @@ -1946,8 +2003,10 @@ def sample_delete_resource_allowance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2025,7 +2084,7 @@ def list_resource_allowances( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListResourceAllowancesPager: r"""List all ResourceAllowances for a project within a region. 
@@ -2068,8 +2127,10 @@ def sample_list_resource_allowances(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1alpha.services.batch_service.pagers.ListResourceAllowancesPager: @@ -2142,7 +2203,7 @@ def update_resource_allowance( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gcb_resource_allowance.ResourceAllowance: r"""Update a Resource Allowance. @@ -2207,8 +2268,10 @@ def sample_update_resource_allowance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.batch_v1alpha.types.ResourceAllowance: @@ -2286,7 +2349,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -2297,8 +2360,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -2339,7 +2404,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -2350,8 +2415,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -2392,7 +2459,7 @@ def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -2408,8 +2475,10 @@ def delete_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2446,7 +2515,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -2461,8 +2530,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2499,7 +2570,7 @@ def get_location( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -2510,8 +2581,10 @@ def get_location( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.Location: Location object. @@ -2552,7 +2625,7 @@ def list_locations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -2563,8 +2636,10 @@ def list_locations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/pagers.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/pagers.py index 4d01baed6669..b6d76a2a7a30 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/pagers.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/pagers.py @@ -67,7 +67,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -81,8 +81,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = batch.ListJobsRequest(request) @@ -141,7 +143,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -155,8 +157,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = batch.ListJobsRequest(request) @@ -219,7 +223,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -233,8 +237,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = batch.ListTasksRequest(request) @@ -293,7 +299,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -307,8 +313,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = batch.ListTasksRequest(request) @@ -371,7 +379,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -385,8 +393,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = batch.ListResourceAllowancesRequest(request) @@ -445,7 +455,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -459,8 +469,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = batch.ListResourceAllowancesRequest(request) diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py index b723e9d7f161..8d356b9a1189 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json +import logging as std_logging +import pickle from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings @@ -22,7 +25,10 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.batch_v1alpha.types import ( resource_allowance as gcb_resource_allowance, @@ -35,6 +41,81 @@ from .base import DEFAULT_CLIENT_INFO, BatchServiceTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class BatchServiceGrpcTransport(BatchServiceTransport): """gRPC backend transport for BatchService. @@ -191,7 +272,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -255,7 +341,9 @@ def operations_client(self) -> operations_v1.OperationsClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) # Return the client from cache. return self._operations_client @@ -277,7 +365,7 @@ def create_job(self) -> Callable[[batch.CreateJobRequest], gcb_job.Job]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_job" not in self._stubs: - self._stubs["create_job"] = self.grpc_channel.unary_unary( + self._stubs["create_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/CreateJob", request_serializer=batch.CreateJobRequest.serialize, response_deserializer=gcb_job.Job.deserialize, @@ -301,7 +389,7 @@ def get_job(self) -> Callable[[batch.GetJobRequest], job.Job]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_job" not in self._stubs: - self._stubs["get_job"] = self.grpc_channel.unary_unary( + self._stubs["get_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/GetJob", request_serializer=batch.GetJobRequest.serialize, response_deserializer=job.Job.deserialize, @@ -327,7 +415,7 @@ def delete_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_job" not in self._stubs: - self._stubs["delete_job"] = self.grpc_channel.unary_unary( + self._stubs["delete_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/DeleteJob", request_serializer=batch.DeleteJobRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -353,7 +441,7 @@ def cancel_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_job" not in self._stubs: - self._stubs["cancel_job"] = self.grpc_channel.unary_unary( + self._stubs["cancel_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/CancelJob", request_serializer=batch.CancelJobRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -377,7 +465,7 @@ def update_job(self) -> Callable[[batch.UpdateJobRequest], gcb_job.Job]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_job" not in self._stubs: - self._stubs["update_job"] = self.grpc_channel.unary_unary( + self._stubs["update_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/UpdateJob", request_serializer=batch.UpdateJobRequest.serialize, response_deserializer=gcb_job.Job.deserialize, @@ -401,7 +489,7 @@ def list_jobs(self) -> Callable[[batch.ListJobsRequest], batch.ListJobsResponse] # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
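The `_logged_channel` wiring above follows the standard gRPC client-interceptor pattern: the original channel is wrapped with `grpc.intercept_channel`, and every stub is then created from the wrapped channel. A stripped-down sketch of the same pattern (the endpoint and print-based logging are placeholders; the real interceptor above logs structured records):

# Illustrative example only, not part of this diff.
import grpc


class _PrintingInterceptor(grpc.UnaryUnaryClientInterceptor):
    def intercept_unary_unary(self, continuation, client_call_details, request):
        print(f"-> {client_call_details.method}")
        response = continuation(client_call_details, request)
        print(f"<- {client_call_details.method}")
        return response


channel = grpc.insecure_channel("localhost:50051")  # placeholder target
logged_channel = grpc.intercept_channel(channel, _PrintingInterceptor())
# Stubs built on `logged_channel` pass every unary-unary call through the
# interceptor, which is how the transport builds its `_stubs` above.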
if "list_jobs" not in self._stubs: - self._stubs["list_jobs"] = self.grpc_channel.unary_unary( + self._stubs["list_jobs"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/ListJobs", request_serializer=batch.ListJobsRequest.serialize, response_deserializer=batch.ListJobsResponse.deserialize, @@ -425,7 +513,7 @@ def get_task(self) -> Callable[[batch.GetTaskRequest], task.Task]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_task" not in self._stubs: - self._stubs["get_task"] = self.grpc_channel.unary_unary( + self._stubs["get_task"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/GetTask", request_serializer=batch.GetTaskRequest.serialize, response_deserializer=task.Task.deserialize, @@ -449,7 +537,7 @@ def list_tasks(self) -> Callable[[batch.ListTasksRequest], batch.ListTasksRespon # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_tasks" not in self._stubs: - self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + self._stubs["list_tasks"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/ListTasks", request_serializer=batch.ListTasksRequest.serialize, response_deserializer=batch.ListTasksResponse.deserialize, @@ -477,7 +565,7 @@ def create_resource_allowance( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_resource_allowance" not in self._stubs: - self._stubs["create_resource_allowance"] = self.grpc_channel.unary_unary( + self._stubs["create_resource_allowance"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/CreateResourceAllowance", request_serializer=batch.CreateResourceAllowanceRequest.serialize, response_deserializer=gcb_resource_allowance.ResourceAllowance.deserialize, @@ -506,7 +594,7 @@ def get_resource_allowance( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_resource_allowance" not in self._stubs: - self._stubs["get_resource_allowance"] = self.grpc_channel.unary_unary( + self._stubs["get_resource_allowance"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/GetResourceAllowance", request_serializer=batch.GetResourceAllowanceRequest.serialize, response_deserializer=resource_allowance.ResourceAllowance.deserialize, @@ -532,7 +620,7 @@ def delete_resource_allowance( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_resource_allowance" not in self._stubs: - self._stubs["delete_resource_allowance"] = self.grpc_channel.unary_unary( + self._stubs["delete_resource_allowance"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/DeleteResourceAllowance", request_serializer=batch.DeleteResourceAllowanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -561,7 +649,7 @@ def list_resource_allowances( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_resource_allowances" not in self._stubs: - self._stubs["list_resource_allowances"] = self.grpc_channel.unary_unary( + self._stubs["list_resource_allowances"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/ListResourceAllowances", request_serializer=batch.ListResourceAllowancesRequest.serialize, response_deserializer=batch.ListResourceAllowancesResponse.deserialize, @@ -589,7 +677,7 @@ def update_resource_allowance( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_resource_allowance" not in self._stubs: - self._stubs["update_resource_allowance"] = self.grpc_channel.unary_unary( + self._stubs["update_resource_allowance"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/UpdateResourceAllowance", request_serializer=batch.UpdateResourceAllowanceRequest.serialize, response_deserializer=gcb_resource_allowance.ResourceAllowance.deserialize, @@ -597,7 +685,7 @@ def update_resource_allowance( return self._stubs["update_resource_allowance"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def delete_operation( @@ -609,7 +697,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -626,7 +714,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -643,7 +731,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -662,7 +750,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, @@ -681,7 +769,7 @@ def list_locations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( + self._stubs["list_locations"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/ListLocations", request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, response_deserializer=locations_pb2.ListLocationsResponse.FromString, @@ -698,7 +786,7 @@ def get_location( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( + self._stubs["get_location"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/GetLocation", request_serializer=locations_pb2.GetLocationRequest.SerializeToString, response_deserializer=locations_pb2.Location.FromString, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py index af20c6190656..e6194dd430c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. # import inspect +import json +import logging as std_logging +import pickle from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings @@ -24,8 +27,11 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore from grpc.experimental import aio # type: ignore +import proto # type: ignore from google.cloud.batch_v1alpha.types import ( resource_allowance as gcb_resource_allowance, @@ -39,6 +45,82 @@ from .base import DEFAULT_CLIENT_INFO, BatchServiceTransport from .grpc import BatchServiceGrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": 
grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class BatchServiceGrpcAsyncIOTransport(BatchServiceTransport): """gRPC AsyncIO backend transport for BatchService. @@ -238,10 +320,13 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = ( "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -264,7 +349,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -287,7 +372,7 @@ def create_job(self) -> Callable[[batch.CreateJobRequest], Awaitable[gcb_job.Job # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_job" not in self._stubs: - self._stubs["create_job"] = self.grpc_channel.unary_unary( + self._stubs["create_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/CreateJob", request_serializer=batch.CreateJobRequest.serialize, response_deserializer=gcb_job.Job.deserialize, @@ -311,7 +396,7 @@ def get_job(self) -> Callable[[batch.GetJobRequest], Awaitable[job.Job]]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_job" not in self._stubs: - self._stubs["get_job"] = self.grpc_channel.unary_unary( + self._stubs["get_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/GetJob", request_serializer=batch.GetJobRequest.serialize, response_deserializer=job.Job.deserialize, @@ -337,7 +422,7 @@ def delete_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_job" not in self._stubs: - self._stubs["delete_job"] = self.grpc_channel.unary_unary( + self._stubs["delete_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/DeleteJob", request_serializer=batch.DeleteJobRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -363,7 +448,7 @@ def cancel_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_job" not in self._stubs: - self._stubs["cancel_job"] = self.grpc_channel.unary_unary( + self._stubs["cancel_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/CancelJob", request_serializer=batch.CancelJobRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -387,7 +472,7 @@ def update_job(self) -> Callable[[batch.UpdateJobRequest], Awaitable[gcb_job.Job # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_job" not in self._stubs: - self._stubs["update_job"] = self.grpc_channel.unary_unary( + self._stubs["update_job"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/UpdateJob", request_serializer=batch.UpdateJobRequest.serialize, response_deserializer=gcb_job.Job.deserialize, @@ -413,7 +498,7 @@ def list_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_jobs" not in self._stubs: - self._stubs["list_jobs"] = self.grpc_channel.unary_unary( + self._stubs["list_jobs"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/ListJobs", request_serializer=batch.ListJobsRequest.serialize, response_deserializer=batch.ListJobsResponse.deserialize, @@ -437,7 +522,7 @@ def get_task(self) -> Callable[[batch.GetTaskRequest], Awaitable[task.Task]]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_task" not in self._stubs: - self._stubs["get_task"] = self.grpc_channel.unary_unary( + self._stubs["get_task"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/GetTask", request_serializer=batch.GetTaskRequest.serialize, response_deserializer=task.Task.deserialize, @@ -463,7 +548,7 @@ def list_tasks( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_tasks" not in self._stubs: - self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + self._stubs["list_tasks"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/ListTasks", request_serializer=batch.ListTasksRequest.serialize, response_deserializer=batch.ListTasksResponse.deserialize, @@ -492,7 +577,7 @@ def create_resource_allowance( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_resource_allowance" not in self._stubs: - self._stubs["create_resource_allowance"] = self.grpc_channel.unary_unary( + self._stubs["create_resource_allowance"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/CreateResourceAllowance", request_serializer=batch.CreateResourceAllowanceRequest.serialize, response_deserializer=gcb_resource_allowance.ResourceAllowance.deserialize, @@ -522,7 +607,7 @@ def get_resource_allowance( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_resource_allowance" not in self._stubs: - self._stubs["get_resource_allowance"] = self.grpc_channel.unary_unary( + self._stubs["get_resource_allowance"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/GetResourceAllowance", request_serializer=batch.GetResourceAllowanceRequest.serialize, response_deserializer=resource_allowance.ResourceAllowance.deserialize, @@ -550,7 +635,7 @@ def delete_resource_allowance( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_resource_allowance" not in self._stubs: - self._stubs["delete_resource_allowance"] = self.grpc_channel.unary_unary( + self._stubs["delete_resource_allowance"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/DeleteResourceAllowance", request_serializer=batch.DeleteResourceAllowanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -580,7 +665,7 @@ def list_resource_allowances( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_resource_allowances" not in self._stubs: - self._stubs["list_resource_allowances"] = self.grpc_channel.unary_unary( + self._stubs["list_resource_allowances"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/ListResourceAllowances", request_serializer=batch.ListResourceAllowancesRequest.serialize, response_deserializer=batch.ListResourceAllowancesResponse.deserialize, @@ -609,7 +694,7 @@ def update_resource_allowance( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_resource_allowance" not in self._stubs: - self._stubs["update_resource_allowance"] = self.grpc_channel.unary_unary( + self._stubs["update_resource_allowance"] = self._logged_channel.unary_unary( "/google.cloud.batch.v1alpha.BatchService/UpdateResourceAllowance", request_serializer=batch.UpdateResourceAllowanceRequest.serialize, response_deserializer=gcb_resource_allowance.ResourceAllowance.deserialize, @@ -776,7 +861,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -792,7 +877,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -809,7 +894,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -826,7 +911,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -845,7 +930,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, @@ -864,7 +949,7 @@ def list_locations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( + self._stubs["list_locations"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/ListLocations", request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, response_deserializer=locations_pb2.ListLocationsResponse.FromString, @@ -881,7 +966,7 @@ def get_location( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( + self._stubs["get_location"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/GetLocation", request_serializer=locations_pb2.GetLocationRequest.SerializeToString, response_deserializer=locations_pb2.Location.FromString, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py index 0dde09cd9956..14751deec0bf 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py @@ -13,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import dataclasses import json # type: ignore +import logging from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings @@ -46,6 +46,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -180,8 +188,10 @@ def post_update_resource_allowance(self, response): """ def pre_cancel_job( - self, request: batch.CancelJobRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[batch.CancelJobRequest, Sequence[Tuple[str, str]]]: + self, + request: batch.CancelJobRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.CancelJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for cancel_job Override in a subclass to manipulate the request or metadata @@ -201,8 +211,10 @@ def post_cancel_job( return response def pre_create_job( - self, request: batch.CreateJobRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[batch.CreateJobRequest, Sequence[Tuple[str, str]]]: + self, + request: batch.CreateJobRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.CreateJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_job Override in a subclass to manipulate the request or metadata @@ -222,8 +234,10 @@ def post_create_job(self, response: gcb_job.Job) -> gcb_job.Job: def pre_create_resource_allowance( self, request: batch.CreateResourceAllowanceRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[batch.CreateResourceAllowanceRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + batch.CreateResourceAllowanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_resource_allowance Override in a subclass to manipulate the request or metadata @@ -243,8 +257,10 @@ def post_create_resource_allowance( return response def pre_delete_job( - self, request: batch.DeleteJobRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[batch.DeleteJobRequest, Sequence[Tuple[str, str]]]: + self, + request: batch.DeleteJobRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.DeleteJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_job Override in a subclass to manipulate the request or metadata @@ -266,8 +282,10 @@ def post_delete_job( def pre_delete_resource_allowance( self, request: batch.DeleteResourceAllowanceRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[batch.DeleteResourceAllowanceRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + batch.DeleteResourceAllowanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_resource_allowance Override in a subclass to manipulate the request or metadata @@ -287,8 +305,10 @@ def post_delete_resource_allowance( return response def pre_get_job( - self, request: batch.GetJobRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[batch.GetJobRequest, Sequence[Tuple[str, str]]]: + self, + request: batch.GetJobRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> 
Tuple[batch.GetJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_job Override in a subclass to manipulate the request or metadata @@ -308,8 +328,10 @@ def post_get_job(self, response: job.Job) -> job.Job: def pre_get_resource_allowance( self, request: batch.GetResourceAllowanceRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[batch.GetResourceAllowanceRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + batch.GetResourceAllowanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_resource_allowance Override in a subclass to manipulate the request or metadata @@ -329,8 +351,10 @@ def post_get_resource_allowance( return response def pre_get_task( - self, request: batch.GetTaskRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[batch.GetTaskRequest, Sequence[Tuple[str, str]]]: + self, + request: batch.GetTaskRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.GetTaskRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_task Override in a subclass to manipulate the request or metadata @@ -348,8 +372,10 @@ def post_get_task(self, response: task.Task) -> task.Task: return response def pre_list_jobs( - self, request: batch.ListJobsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[batch.ListJobsRequest, Sequence[Tuple[str, str]]]: + self, + request: batch.ListJobsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.ListJobsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_jobs Override in a subclass to manipulate the request or metadata @@ -371,8 +397,10 @@ def post_list_jobs( def pre_list_resource_allowances( self, request: batch.ListResourceAllowancesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[batch.ListResourceAllowancesRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + batch.ListResourceAllowancesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_resource_allowances Override in a subclass to manipulate the request or metadata @@ -392,8 +420,10 @@ def post_list_resource_allowances( return response def pre_list_tasks( - self, request: batch.ListTasksRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[batch.ListTasksRequest, Sequence[Tuple[str, str]]]: + self, + request: batch.ListTasksRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.ListTasksRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_tasks Override in a subclass to manipulate the request or metadata @@ -413,8 +443,10 @@ def post_list_tasks( return response def pre_update_job( - self, request: batch.UpdateJobRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[batch.UpdateJobRequest, Sequence[Tuple[str, str]]]: + self, + request: batch.UpdateJobRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.UpdateJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_job Override in a subclass to manipulate the request or metadata @@ -434,8 +466,10 @@ def post_update_job(self, response: gcb_job.Job) -> gcb_job.Job: def pre_update_resource_allowance( self, request: batch.UpdateResourceAllowanceRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[batch.UpdateResourceAllowanceRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, 
Union[str, bytes]]], + ) -> Tuple[ + batch.UpdateResourceAllowanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for update_resource_allowance Override in a subclass to manipulate the request or metadata @@ -457,8 +491,10 @@ def post_update_resource_allowance( def pre_get_location( self, request: locations_pb2.GetLocationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -480,8 +516,10 @@ def post_get_location( def pre_list_locations( self, request: locations_pb2.ListLocationsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ -503,8 +541,10 @@ def post_list_locations( def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -524,8 +564,10 @@ def post_cancel_operation(self, response: None) -> None: def pre_delete_operation( self, request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -545,8 +587,10 @@ def post_delete_operation(self, response: None) -> None: def pre_get_operation( self, request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -568,8 +612,10 @@ def post_get_operation( def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -766,7 +812,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the cancel job method over 
HTTP. @@ -776,8 +822,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -790,6 +838,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseCancelJob._get_http_options() ) + request, metadata = self._interceptor.pre_cancel_job(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseCancelJob._get_transcoded_request( @@ -808,6 +857,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.CancelJob", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "CancelJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._CancelJob._get_response( self._host, @@ -827,7 +903,29 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_cancel_job(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceClient.cancel_job", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "CancelJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _CreateJob( @@ -865,7 +963,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gcb_job.Job: r"""Call the create job method over HTTP. @@ -875,8 +973,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
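Throughout these hunks the ``metadata`` parameter widens from ``Sequence[Tuple[str, str]]`` to ``Sequence[Tuple[str, Union[str, bytes]]]``: normal headers keep ``str`` values, while keys ending in ``-bin`` carry ``bytes``. A minimal sketch of what a caller could now pass on the public client surface (the header names below are invented for illustration):

```python
from google.cloud import batch_v1alpha

client = batch_v1alpha.BatchServiceClient()

# Mixing a plain string header with a binary header whose key ends in "-bin".
job = client.get_job(
    name="projects/my-project/locations/us-central1/jobs/my-job",
    metadata=[
        ("x-example-trace", "abc123"),           # str value for a normal key
        ("x-example-token-bin", b"\x0a\x02hi"),  # bytes value for a "-bin" key
    ],
)
```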
Returns: ~.gcb_job.Job: @@ -886,6 +986,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseCreateJob._get_http_options() ) + request, metadata = self._interceptor.pre_create_job(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseCreateJob._get_transcoded_request( @@ -904,6 +1005,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.CreateJob", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "CreateJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._CreateJob._get_response( self._host, @@ -925,7 +1053,29 @@ def __call__( pb_resp = gcb_job.Job.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_job(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gcb_job.Job.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceClient.create_job", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "CreateJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _CreateResourceAllowance( @@ -964,7 +1114,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gcb_resource_allowance.ResourceAllowance: r"""Call the create resource allowance method over HTTP. @@ -974,8 +1124,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.gcb_resource_allowance.ResourceAllowance: @@ -989,6 +1141,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseCreateResourceAllowance._get_http_options() ) + request, metadata = self._interceptor.pre_create_resource_allowance( request, metadata ) @@ -1005,6 +1158,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.CreateResourceAllowance", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "CreateResourceAllowance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._CreateResourceAllowance._get_response( self._host, @@ -1026,7 +1206,31 @@ def __call__( pb_resp = gcb_resource_allowance.ResourceAllowance.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_resource_allowance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gcb_resource_allowance.ResourceAllowance.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceClient.create_resource_allowance", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "CreateResourceAllowance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteJob( @@ -1063,7 +1267,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the delete job method over HTTP. @@ -1073,8 +1277,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
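The ``pre_*``/``post_*`` hooks typed above are the points where a subclass can manipulate the request or metadata. A sketch of such a subclass, assuming the REST interceptor base class follows the usual GAPIC naming (``BatchServiceRestInterceptor``); treat that class name as an assumption rather than something shown in this diff:

```python
from typing import Sequence, Tuple, Union

from google.cloud.batch_v1alpha.services.batch_service.transports.rest import (
    BatchServiceRestInterceptor,  # assumed name, per the usual GAPIC REST layout
)
from google.cloud.batch_v1alpha.types import batch


class TaggingInterceptor(BatchServiceRestInterceptor):
    """Adds one str header and one bytes ("-bin") header before GetJob calls."""

    def pre_get_job(
        self,
        request: batch.GetJobRequest,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[batch.GetJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
        extra = [("x-example-caller", "my-service"), ("x-example-id-bin", b"\x01\x02")]
        return request, list(metadata) + extra
```

A custom interceptor like this is normally handed to the REST transport when the client is constructed; that wiring sits outside the hunks shown here.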
Returns: ~.operations_pb2.Operation: @@ -1087,6 +1293,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseDeleteJob._get_http_options() ) + request, metadata = self._interceptor.pre_delete_job(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseDeleteJob._get_transcoded_request( @@ -1101,6 +1308,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.DeleteJob", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "DeleteJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._DeleteJob._get_response( self._host, @@ -1119,7 +1353,29 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_job(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceClient.delete_job", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "DeleteJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteResourceAllowance( @@ -1157,7 +1413,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the delete resource allowance method over HTTP. @@ -1167,8 +1423,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
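Each transport method above only assembles and emits its structured request/response records when the module logger is enabled for ``DEBUG`` (and ``CLIENT_LOGGING_SUPPORTED`` is true, i.e. the installed ``google.api_core`` provides ``client_logging``). A standard-library-only sketch of turning that on; the handler and format are illustrative:

```python
import logging

# The transport loggers live under the "google.cloud.batch_v1alpha" namespace,
# so enabling DEBUG on the parent logger surfaces the records guarded by
# `_LOGGER.isEnabledFor(logging.DEBUG)` above.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s %(name)s %(levelname)s %(message)s"))

batch_logger = logging.getLogger("google.cloud.batch_v1alpha")
batch_logger.setLevel(logging.DEBUG)
batch_logger.addHandler(handler)
```

The structured fields (serviceName, rpcName, httpRequest, httpResponse) travel in each record's ``extra`` mapping, so a structured handler or custom formatter is needed to render them.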
Returns: ~.operations_pb2.Operation: @@ -1181,6 +1439,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseDeleteResourceAllowance._get_http_options() ) + request, metadata = self._interceptor.pre_delete_resource_allowance( request, metadata ) @@ -1193,6 +1452,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.DeleteResourceAllowance", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "DeleteResourceAllowance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._DeleteResourceAllowance._get_response( self._host, @@ -1211,7 +1497,29 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_resource_allowance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceClient.delete_resource_allowance", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "DeleteResourceAllowance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetJob(_BaseBatchServiceRestTransport._BaseGetJob, BatchServiceRestStub): @@ -1246,7 +1554,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> job.Job: r"""Call the get job method over HTTP. @@ -1256,8 +1564,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.job.Job: @@ -1267,6 +1577,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseGetJob._get_http_options() ) + request, metadata = self._interceptor.pre_get_job(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseGetJob._get_transcoded_request( @@ -1281,6 +1592,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.GetJob", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "GetJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._GetJob._get_response( self._host, @@ -1301,7 +1639,29 @@ def __call__( pb_resp = job.Job.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = job.Job.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceClient.get_job", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "GetJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetResourceAllowance( @@ -1338,7 +1698,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource_allowance.ResourceAllowance: r"""Call the get resource allowance method over HTTP. @@ -1348,8 +1708,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.resource_allowance.ResourceAllowance: @@ -1363,6 +1725,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseGetResourceAllowance._get_http_options() ) + request, metadata = self._interceptor.pre_get_resource_allowance( request, metadata ) @@ -1375,6 +1738,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.GetResourceAllowance", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "GetResourceAllowance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._GetResourceAllowance._get_response( self._host, @@ -1395,7 +1785,31 @@ def __call__( pb_resp = resource_allowance.ResourceAllowance.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_resource_allowance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = resource_allowance.ResourceAllowance.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceClient.get_resource_allowance", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "GetResourceAllowance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetTask(_BaseBatchServiceRestTransport._BaseGetTask, BatchServiceRestStub): @@ -1430,7 +1844,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> task.Task: r"""Call the get task method over HTTP. @@ -1440,8 +1854,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.task.Task: @@ -1451,6 +1867,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseGetTask._get_http_options() ) + request, metadata = self._interceptor.pre_get_task(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseGetTask._get_transcoded_request( @@ -1465,6 +1882,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.GetTask", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "GetTask", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._GetTask._get_response( self._host, @@ -1485,7 +1929,29 @@ def __call__( pb_resp = task.Task.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_task(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = task.Task.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceClient.get_task", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "GetTask", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListJobs(_BaseBatchServiceRestTransport._BaseListJobs, BatchServiceRestStub): @@ -1520,7 +1986,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> batch.ListJobsResponse: r"""Call the list jobs method over HTTP. @@ -1530,8 +1996,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
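``_ListJobs`` below returns a single ``batch.ListJobsResponse`` page; the public client wraps it in a pager so callers never touch page tokens. A small usage sketch, assuming the usual GAPIC pager behavior:

```python
from google.cloud import batch_v1alpha

client = batch_v1alpha.BatchServiceClient()

# The pager fetches successive ListJobsResponse pages transparently.
for job in client.list_jobs(parent="projects/my-project/locations/us-central1"):
    print(job.name)
```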
Returns: ~.batch.ListJobsResponse: @@ -1541,6 +2009,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseListJobs._get_http_options() ) + request, metadata = self._interceptor.pre_list_jobs(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseListJobs._get_transcoded_request( @@ -1555,6 +2024,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.ListJobs", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "ListJobs", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._ListJobs._get_response( self._host, @@ -1575,7 +2071,29 @@ def __call__( pb_resp = batch.ListJobsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_jobs(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = batch.ListJobsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceClient.list_jobs", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "ListJobs", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListResourceAllowances( @@ -1612,7 +2130,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> batch.ListResourceAllowancesResponse: r"""Call the list resource allowances method over HTTP. @@ -1622,8 +2140,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.batch.ListResourceAllowancesResponse: @@ -1633,6 +2153,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseListResourceAllowances._get_http_options() ) + request, metadata = self._interceptor.pre_list_resource_allowances( request, metadata ) @@ -1645,6 +2166,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.ListResourceAllowances", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "ListResourceAllowances", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._ListResourceAllowances._get_response( self._host, @@ -1665,7 +2213,31 @@ def __call__( pb_resp = batch.ListResourceAllowancesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_resource_allowances(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = batch.ListResourceAllowancesResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceClient.list_resource_allowances", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "ListResourceAllowances", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListTasks( @@ -1702,7 +2274,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> batch.ListTasksResponse: r"""Call the list tasks method over HTTP. @@ -1712,8 +2284,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.batch.ListTasksResponse: @@ -1723,6 +2297,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseListTasks._get_http_options() ) + request, metadata = self._interceptor.pre_list_tasks(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseListTasks._get_transcoded_request( @@ -1737,6 +2312,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.ListTasks", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "ListTasks", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._ListTasks._get_response( self._host, @@ -1757,7 +2359,29 @@ def __call__( pb_resp = batch.ListTasksResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_tasks(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = batch.ListTasksResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceClient.list_tasks", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "ListTasks", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateJob( @@ -1795,7 +2419,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gcb_job.Job: r"""Call the update job method over HTTP. @@ -1805,8 +2429,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.gcb_job.Job: @@ -1816,6 +2442,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseUpdateJob._get_http_options() ) + request, metadata = self._interceptor.pre_update_job(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseUpdateJob._get_transcoded_request( @@ -1834,6 +2461,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.UpdateJob", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "UpdateJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._UpdateJob._get_response( self._host, @@ -1855,7 +2509,29 @@ def __call__( pb_resp = gcb_job.Job.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_job(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gcb_job.Job.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceClient.update_job", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "UpdateJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateResourceAllowance( @@ -1894,7 +2570,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gcb_resource_allowance.ResourceAllowance: r"""Call the update resource allowance method over HTTP. @@ -1904,8 +2580,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
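``_UpdateJob`` carries the same metadata and logging changes. A shape-only sketch of driving it through the public client with a ``FieldMask``; which job fields the service actually accepts for update is not shown in this diff, so the ``labels`` path here is purely illustrative:

```python
from google.cloud import batch_v1alpha
from google.protobuf import field_mask_pb2

client = batch_v1alpha.BatchServiceClient()

request = batch_v1alpha.UpdateJobRequest(
    job=batch_v1alpha.Job(
        name="projects/my-project/locations/us-central1/jobs/my-job",
        labels={"team": "research"},
    ),
    update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
)
updated = client.update_job(request=request)
```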
Returns: ~.gcb_resource_allowance.ResourceAllowance: @@ -1919,6 +2597,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseUpdateResourceAllowance._get_http_options() ) + request, metadata = self._interceptor.pre_update_resource_allowance( request, metadata ) @@ -1935,6 +2614,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.UpdateResourceAllowance", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "UpdateResourceAllowance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._UpdateResourceAllowance._get_response( self._host, @@ -1956,7 +2662,31 @@ def __call__( pb_resp = gcb_resource_allowance.ResourceAllowance.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_resource_allowance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gcb_resource_allowance.ResourceAllowance.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceClient.update_resource_allowance", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "UpdateResourceAllowance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -2097,7 +2827,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: r"""Call the get location method over HTTP. @@ -2107,8 +2837,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: locations_pb2.Location: Response from GetLocation method. 
@@ -2117,6 +2849,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseGetLocation._get_http_options() ) + request, metadata = self._interceptor.pre_get_location(request, metadata) transcoded_request = ( _BaseBatchServiceRestTransport._BaseGetLocation._get_transcoded_request( @@ -2131,6 +2864,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.GetLocation", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._GetLocation._get_response( self._host, @@ -2150,6 +2910,27 @@ def __call__( resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceAsyncClient.GetLocation", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -2190,7 +2971,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. @@ -2200,8 +2981,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: locations_pb2.ListLocationsResponse: Response from ListLocations method. 
@@ -2210,6 +2993,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseListLocations._get_http_options() ) + request, metadata = self._interceptor.pre_list_locations(request, metadata) transcoded_request = _BaseBatchServiceRestTransport._BaseListLocations._get_transcoded_request( http_options, request @@ -2220,6 +3004,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.ListLocations", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._ListLocations._get_response( self._host, @@ -2239,6 +3050,27 @@ def __call__( resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceAsyncClient.ListLocations", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -2280,7 +3112,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Call the cancel operation method over HTTP. @@ -2290,13 +3122,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" http_options = ( _BaseBatchServiceRestTransport._BaseCancelOperation._get_http_options() ) + request, metadata = self._interceptor.pre_cancel_operation( request, metadata ) @@ -2313,6 +3148,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.CancelOperation", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._CancelOperation._get_response( self._host, @@ -2369,7 +3231,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Call the delete operation method over HTTP. @@ -2379,13 +3241,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = ( _BaseBatchServiceRestTransport._BaseDeleteOperation._get_http_options() ) + request, metadata = self._interceptor.pre_delete_operation( request, metadata ) @@ -2398,6 +3263,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.DeleteOperation", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._DeleteOperation._get_response( self._host, @@ -2453,7 +3345,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. @@ -2463,8 +3355,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from GetOperation method. @@ -2473,6 +3367,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseGetOperation._get_http_options() ) + request, metadata = self._interceptor.pre_get_operation(request, metadata) transcoded_request = _BaseBatchServiceRestTransport._BaseGetOperation._get_transcoded_request( http_options, request @@ -2485,6 +3380,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.GetOperation", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._GetOperation._get_response( self._host, @@ -2504,6 +3426,27 @@ def __call__( resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -2544,7 +3487,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. @@ -2554,8 +3497,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.ListOperationsResponse: Response from ListOperations method. 
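The operations hunks wire the same logging into the long-running-operations mixin. A short sketch of checking an operation (for example, one returned by ``delete_job``) through the public client; the operation name is hypothetical:

```python
from google.cloud import batch_v1alpha
from google.longrunning import operations_pb2

client = batch_v1alpha.BatchServiceClient()

op = client.get_operation(
    request=operations_pb2.GetOperationRequest(
        name="projects/my-project/locations/us-central1/operations/operation-123"
    )
)
print(op.name, op.done)
```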
@@ -2564,6 +3509,7 @@ def __call__( http_options = ( _BaseBatchServiceRestTransport._BaseListOperations._get_http_options() ) + request, metadata = self._interceptor.pre_list_operations(request, metadata) transcoded_request = _BaseBatchServiceRestTransport._BaseListOperations._get_transcoded_request( http_options, request @@ -2574,6 +3520,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.ListOperations", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = BatchServiceRestTransport._ListOperations._get_response( self._host, @@ -2593,6 +3566,27 @@ def __call__( resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1alpha.BatchServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.batch.v1alpha.BatchService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py index c0604d90ceec..35a7daa6bb5b 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py @@ -181,22 +181,29 @@ class SchedulingPolicy(proto.Enum): class LogsPolicy(proto.Message): - r"""LogsPolicy describes how outputs from a Job's Tasks - (stdout/stderr) will be preserved. + r"""LogsPolicy describes if and how a job's logs are preserved. Logs + include information that is automatically written by the Batch + service agent and any information that you configured the job's + runnables to write to the ``stdout`` or ``stderr`` streams. Attributes: destination (google.cloud.batch_v1alpha.types.LogsPolicy.Destination): - Where logs should be saved. + If and where logs should be saved. logs_path (str): - The path to which logs are saved when the - destination = PATH. This can be a local file - path on the VM, or under the mount point of a - Persistent Disk or Filestore, or a Cloud Storage - path. + When ``destination`` is set to ``PATH``, you must set this + field to the path where you want logs to be saved. This path + can point to a local directory on the VM or (if configured) + a directory under the mount path of any Cloud Storage + bucket, network file system (NFS), or writable persistent + disk that is mounted to the job.
For example, if the job has + a bucket with ``mountPath`` set to ``/mnt/disks/my-bucket``, + you can write logs to the root directory of the + ``remotePath`` of that bucket by setting this field to + ``/mnt/disks/my-bucket/``. cloud_logging_option (google.cloud.batch_v1alpha.types.LogsPolicy.CloudLoggingOption): - Optional. Additional settings for Cloud Logging. It will - only take effect when the destination of ``LogsPolicy`` is - set to ``CLOUD_LOGGING``. + Optional. When ``destination`` is set to ``CLOUD_LOGGING``, + you can optionally set this field to configure additional + settings for Cloud Logging. """ class Destination(proto.Enum): @@ -204,11 +211,14 @@ class Destination(proto.Enum): Values: DESTINATION_UNSPECIFIED (0): - Logs are not preserved. + (Default) Logs are not preserved. CLOUD_LOGGING (1): - Logs are streamed to Cloud Logging. + Logs are streamed to Cloud Logging. Optionally, you can + configure additional settings in the ``cloudLoggingOption`` + field. PATH (2): - Logs are saved to a file path. + Logs are saved to the file path specified in the + ``logsPath`` field. """ DESTINATION_UNSPECIFIED = 0 CLOUD_LOGGING = 1 @@ -220,8 +230,8 @@ class CloudLoggingOption(proto.Message): Attributes: use_generic_task_monitored_resource (bool): - Optional. Set this flag to true to change the `monitored - resource + Optional. Set this field to ``true`` to change the + `monitored resource type `__ for Cloud Logging logs generated by this Batch job from the ```batch.googleapis.com/Job`` `__ @@ -940,9 +950,9 @@ class InstancePolicyOrTemplate(proto.Message): field as 'instance_template' instead of 'template' to avoid C++ keyword conflict. - Batch only supports global instance templates. You can - specify the global instance template as a full or partial - URL. + Batch only supports global instance templates from the same + project as the job. You can specify the global instance + template as a full or partial URL. This field is a member of `oneof`_ ``policy_template``. 
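The reworked LogsPolicy documentation above describes sending task logs to a path under a mounted volume. A hypothetical job configuration matching that description (the bucket name, mount path, and omitted runnables are placeholders; batch_v1alpha is assumed to export GCS, Volume, TaskSpec, and TaskGroup as batch_v1 does):

from google.cloud import batch_v1alpha

gcs_volume = batch_v1alpha.Volume()
gcs_volume.gcs = batch_v1alpha.GCS(remote_path="my-bucket")  # hypothetical bucket
gcs_volume.mount_path = "/mnt/disks/my-bucket"

task = batch_v1alpha.TaskSpec()  # runnables elided for brevity
task.volumes = [gcs_volume]

group = batch_v1alpha.TaskGroup()
group.task_spec = task

job = batch_v1alpha.Job()
job.task_groups = [group]
job.logs_policy = batch_v1alpha.LogsPolicy()
# destination=PATH plus a logs_path under the volume's mount_path, as described above.
job.logs_policy.destination = batch_v1alpha.LogsPolicy.Destination.PATH
job.logs_policy.logs_path = "/mnt/disks/my-bucket/"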
install_gpu_drivers (bool): diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index e2df1067e4dd..4e0fce836f45 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -55,7 +55,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1.types.Job", @@ -143,7 +143,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1.types.Job", @@ -224,7 +224,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -304,7 +304,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -385,7 +385,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1.types.Job", @@ -465,7 +465,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1.types.Job", @@ -546,7 +546,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1.types.Task", @@ -626,7 +626,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1.types.Task", @@ -707,7 +707,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1.services.batch_service.pagers.ListJobsAsyncPager", @@ -787,7 +787,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1.services.batch_service.pagers.ListJobsPager", @@ -868,7 +868,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1.services.batch_service.pagers.ListTasksAsyncPager", @@ -948,7 +948,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1.services.batch_service.pagers.ListTasksPager", diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 7f67670b100c..981b9c62ced1 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -47,7 +47,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": 
"google.api_core.operation_async.AsyncOperation", @@ -127,7 +127,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -216,7 +216,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.types.Job", @@ -304,7 +304,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.types.Job", @@ -393,7 +393,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.types.ResourceAllowance", @@ -481,7 +481,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.types.ResourceAllowance", @@ -562,7 +562,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -642,7 +642,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -723,7 +723,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -803,7 +803,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -884,7 +884,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.types.Job", @@ -964,7 +964,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.types.Job", @@ -1045,7 +1045,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.types.ResourceAllowance", @@ -1125,7 +1125,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.types.ResourceAllowance", @@ -1206,7 +1206,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.types.Task", @@ -1286,7 +1286,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.types.Task", @@ -1367,7 +1367,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.services.batch_service.pagers.ListJobsAsyncPager", @@ -1447,7 +1447,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.services.batch_service.pagers.ListJobsPager", @@ -1528,7 +1528,7 @@ }, { "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.services.batch_service.pagers.ListResourceAllowancesAsyncPager", @@ -1608,7 +1608,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.services.batch_service.pagers.ListResourceAllowancesPager", @@ -1689,7 +1689,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.services.batch_service.pagers.ListTasksAsyncPager", @@ -1769,7 +1769,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.services.batch_service.pagers.ListTasksPager", @@ -1854,7 +1854,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.types.Job", @@ -1938,7 +1938,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.types.Job", @@ -2023,7 +2023,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.types.ResourceAllowance", @@ -2107,7 +2107,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.batch_v1alpha.types.ResourceAllowance", diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py index 6f372d5128c2..e39ed8710d54 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py @@ -3530,6 +3530,7 @@ def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_job(request) @@ -3590,6 +3591,7 @@ def test_create_job_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_job(**mock_args) @@ -3720,6 +3722,7 @@ def test_get_job_rest_required_fields(request_type=batch.GetJobRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_job(request) @@ -3765,6 +3768,7 @@ def test_get_job_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_job(**mock_args) @@ -3859,6 +3863,7 @@ def test_delete_job_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_job(**mock_args) @@ -3951,6 +3956,7 @@ def test_list_jobs_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_jobs(**mock_args) @@ -4140,6 +4146,7 @@ def test_get_task_rest_required_fields(request_type=batch.GetTaskRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_task(request) @@ -4187,6 +4194,7 @@ def test_get_task_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_task(**mock_args) @@ -4324,6 +4332,7 @@ def test_list_tasks_rest_required_fields(request_type=batch.ListTasksRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_tasks(request) @@ -4380,6 +4389,7 @@ def test_list_tasks_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_tasks(**mock_args) @@ -4910,6 +4920,7 @@ def test_create_job_rest_bad_request(request_type=batch.CreateJobRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_job(request) @@ -5180,6 +5191,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_job(request) # Establish that the response is the type that we expect. @@ -5220,6 +5232,7 @@ def test_create_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = gcb_job.Job.to_json(gcb_job.Job()) req.return_value.content = return_value @@ -5262,6 +5275,7 @@ def test_get_job_rest_bad_request(request_type=batch.GetJobRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_job(request) @@ -5299,6 +5313,7 @@ def test_get_job_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_job(request) # Establish that the response is the type that we expect. 
@@ -5339,6 +5354,7 @@ def test_get_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = job.Job.to_json(job.Job()) req.return_value.content = return_value @@ -5381,6 +5397,7 @@ def test_delete_job_rest_bad_request(request_type=batch.DeleteJobRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_job(request) @@ -5411,6 +5428,7 @@ def test_delete_job_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_job(request) # Establish that the response is the type that we expect. @@ -5450,6 +5468,7 @@ def test_delete_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -5492,6 +5511,7 @@ def test_list_jobs_rest_bad_request(request_type=batch.ListJobsRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_jobs(request) @@ -5528,6 +5548,7 @@ def test_list_jobs_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_jobs(request) # Establish that the response is the type that we expect. @@ -5567,6 +5588,7 @@ def test_list_jobs_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = batch.ListJobsResponse.to_json(batch.ListJobsResponse()) req.return_value.content = return_value @@ -5611,6 +5633,7 @@ def test_get_task_rest_bad_request(request_type=batch.GetTaskRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_task(request) @@ -5648,6 +5671,7 @@ def test_get_task_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_task(request) # Establish that the response is the type that we expect. 
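The `req.return_value.headers = {...}` lines added throughout these tests exist because the new debug-logging path reads dict(response.headers) and response.status_code from the mocked REST response. A minimal sketch of that mock shape, using unittest.mock (the header names and values are arbitrary):

from unittest import mock

response_value = mock.Mock()
response_value.status_code = 200
response_value.headers = {"header-1": "value-1", "header-2": "value-2"}
response_value.content = b"{}"

# Code that builds the "Received response" log record can now read both
# attributes without raising.
assert response_value.status_code == 200
assert dict(response_value.headers) == {"header-1": "value-1", "header-2": "value-2"}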
@@ -5686,6 +5710,7 @@ def test_get_task_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = task.Task.to_json(task.Task()) req.return_value.content = return_value @@ -5730,6 +5755,7 @@ def test_list_tasks_rest_bad_request(request_type=batch.ListTasksRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_tasks(request) @@ -5768,6 +5794,7 @@ def test_list_tasks_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_tasks(request) # Establish that the response is the type that we expect. @@ -5807,6 +5834,7 @@ def test_list_tasks_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = batch.ListTasksResponse.to_json(batch.ListTasksResponse()) req.return_value.content = return_value @@ -5851,6 +5879,7 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_location(request) @@ -5881,6 +5910,7 @@ def test_get_location_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_location(request) @@ -5909,6 +5939,7 @@ def test_list_locations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_locations(request) @@ -5939,6 +5970,7 @@ def test_list_locations_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_locations(request) @@ -5969,6 +6001,7 @@ def test_cancel_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.cancel_operation(request) @@ -5999,6 +6032,7 @@ def test_cancel_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.cancel_operation(request) @@ -6029,6 +6063,7 @@ def test_delete_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_operation(request) @@ -6059,6 +6094,7 @@ def test_delete_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} 
response = client.delete_operation(request) @@ -6089,6 +6125,7 @@ def test_get_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_operation(request) @@ -6119,6 +6156,7 @@ def test_get_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_operation(request) @@ -6149,6 +6187,7 @@ def test_list_operations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_operations(request) @@ -6179,6 +6218,7 @@ def test_list_operations_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_operations(request) diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py index d978de4f7fba..204a7506d590 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py @@ -6255,6 +6255,7 @@ def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_job(request) @@ -6315,6 +6316,7 @@ def test_create_job_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_job(**mock_args) @@ -6445,6 +6447,7 @@ def test_get_job_rest_required_fields(request_type=batch.GetJobRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_job(request) @@ -6490,6 +6493,7 @@ def test_get_job_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_job(**mock_args) @@ -6584,6 +6588,7 @@ def test_delete_job_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_job(**mock_args) @@ -6714,6 +6719,7 @@ def test_cancel_job_rest_required_fields(request_type=batch.CancelJobRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.cancel_job(request) @@ -6757,6 +6763,7 @@ def test_cancel_job_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content 
= json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.cancel_job(**mock_args) @@ -6889,6 +6896,7 @@ def test_update_job_rest_required_fields(request_type=batch.UpdateJobRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_job(request) @@ -6950,6 +6958,7 @@ def test_update_job_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_job(**mock_args) @@ -7044,6 +7053,7 @@ def test_list_jobs_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_jobs(**mock_args) @@ -7233,6 +7243,7 @@ def test_get_task_rest_required_fields(request_type=batch.GetTaskRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_task(request) @@ -7280,6 +7291,7 @@ def test_get_task_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_task(**mock_args) @@ -7418,6 +7430,7 @@ def test_list_tasks_rest_required_fields(request_type=batch.ListTasksRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_tasks(request) @@ -7475,6 +7488,7 @@ def test_list_tasks_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_tasks(**mock_args) @@ -7682,6 +7696,7 @@ def test_create_resource_allowance_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_resource_allowance(request) @@ -7748,6 +7763,7 @@ def test_create_resource_allowance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_resource_allowance(**mock_args) @@ -7892,6 +7908,7 @@ def test_get_resource_allowance_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_resource_allowance(request) @@ -7939,6 +7956,7 @@ def test_get_resource_allowance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_resource_allowance(**mock_args) @@ -8083,6 +8101,7 @@ def test_delete_resource_allowance_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_resource_allowance(request) @@ -8136,6 +8155,7 @@ def test_delete_resource_allowance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_resource_allowance(**mock_args) @@ -8279,6 +8299,7 @@ def test_list_resource_allowances_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_resource_allowances(request) @@ -8332,6 +8353,7 @@ def test_list_resource_allowances_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_resource_allowances(**mock_args) @@ -8534,6 +8556,7 @@ def test_update_resource_allowance_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_resource_allowance(request) @@ -8603,6 +8626,7 @@ def test_update_resource_allowance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_resource_allowance(**mock_args) @@ -9438,6 +9462,7 @@ def test_create_job_rest_bad_request(request_type=batch.CreateJobRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_job(request) @@ -9741,6 +9766,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_job(request) # Establish that the response is the type that we expect. 
@@ -9784,6 +9810,7 @@ def test_create_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = gcb_job.Job.to_json(gcb_job.Job()) req.return_value.content = return_value @@ -9826,6 +9853,7 @@ def test_get_job_rest_bad_request(request_type=batch.GetJobRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_job(request) @@ -9864,6 +9892,7 @@ def test_get_job_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_job(request) # Establish that the response is the type that we expect. @@ -9905,6 +9934,7 @@ def test_get_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = job.Job.to_json(job.Job()) req.return_value.content = return_value @@ -9947,6 +9977,7 @@ def test_delete_job_rest_bad_request(request_type=batch.DeleteJobRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_job(request) @@ -9977,6 +10008,7 @@ def test_delete_job_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_job(request) # Establish that the response is the type that we expect. @@ -10016,6 +10048,7 @@ def test_delete_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -10058,6 +10091,7 @@ def test_cancel_job_rest_bad_request(request_type=batch.CancelJobRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.cancel_job(request) @@ -10088,6 +10122,7 @@ def test_cancel_job_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.cancel_job(request) # Establish that the response is the type that we expect. 
@@ -10127,6 +10162,7 @@ def test_cancel_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -10169,6 +10205,7 @@ def test_update_job_rest_bad_request(request_type=batch.UpdateJobRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_job(request) @@ -10472,6 +10509,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_job(request) # Establish that the response is the type that we expect. @@ -10515,6 +10553,7 @@ def test_update_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = gcb_job.Job.to_json(gcb_job.Job()) req.return_value.content = return_value @@ -10557,6 +10596,7 @@ def test_list_jobs_rest_bad_request(request_type=batch.ListJobsRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_jobs(request) @@ -10593,6 +10633,7 @@ def test_list_jobs_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_jobs(request) # Establish that the response is the type that we expect. @@ -10632,6 +10673,7 @@ def test_list_jobs_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = batch.ListJobsResponse.to_json(batch.ListJobsResponse()) req.return_value.content = return_value @@ -10676,6 +10718,7 @@ def test_get_task_rest_bad_request(request_type=batch.GetTaskRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_task(request) @@ -10713,6 +10756,7 @@ def test_get_task_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_task(request) # Establish that the response is the type that we expect. 
@@ -10751,6 +10795,7 @@ def test_get_task_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = task.Task.to_json(task.Task()) req.return_value.content = return_value @@ -10795,6 +10840,7 @@ def test_list_tasks_rest_bad_request(request_type=batch.ListTasksRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_tasks(request) @@ -10833,6 +10879,7 @@ def test_list_tasks_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_tasks(request) # Establish that the response is the type that we expect. @@ -10872,6 +10919,7 @@ def test_list_tasks_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = batch.ListTasksResponse.to_json(batch.ListTasksResponse()) req.return_value.content = return_value @@ -10916,6 +10964,7 @@ def test_create_resource_allowance_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_resource_allowance(request) @@ -11044,6 +11093,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_resource_allowance(request) # Establish that the response is the type that we expect. @@ -11085,6 +11135,7 @@ def test_create_resource_allowance_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = gcb_resource_allowance.ResourceAllowance.to_json( gcb_resource_allowance.ResourceAllowance() ) @@ -11133,6 +11184,7 @@ def test_get_resource_allowance_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_resource_allowance(request) @@ -11171,6 +11223,7 @@ def test_get_resource_allowance_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_resource_allowance(request) # Establish that the response is the type that we expect. 
@@ -11212,6 +11265,7 @@ def test_get_resource_allowance_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = resource_allowance.ResourceAllowance.to_json( resource_allowance.ResourceAllowance() ) @@ -11260,6 +11314,7 @@ def test_delete_resource_allowance_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_resource_allowance(request) @@ -11292,6 +11347,7 @@ def test_delete_resource_allowance_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_resource_allowance(request) # Establish that the response is the type that we expect. @@ -11333,6 +11389,7 @@ def test_delete_resource_allowance_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -11377,6 +11434,7 @@ def test_list_resource_allowances_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_resource_allowances(request) @@ -11413,6 +11471,7 @@ def test_list_resource_allowances_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_resource_allowances(request) # Establish that the response is the type that we expect. @@ -11454,6 +11513,7 @@ def test_list_resource_allowances_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = batch.ListResourceAllowancesResponse.to_json( batch.ListResourceAllowancesResponse() ) @@ -11504,6 +11564,7 @@ def test_update_resource_allowance_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_resource_allowance(request) @@ -11636,6 +11697,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_resource_allowance(request) # Establish that the response is the type that we expect. 
@@ -11677,6 +11739,7 @@ def test_update_resource_allowance_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = gcb_resource_allowance.ResourceAllowance.to_json( gcb_resource_allowance.ResourceAllowance() ) @@ -11723,6 +11786,7 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_location(request) @@ -11753,6 +11817,7 @@ def test_get_location_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_location(request) @@ -11781,6 +11846,7 @@ def test_list_locations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_locations(request) @@ -11811,6 +11877,7 @@ def test_list_locations_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_locations(request) @@ -11841,6 +11908,7 @@ def test_cancel_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.cancel_operation(request) @@ -11871,6 +11939,7 @@ def test_cancel_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.cancel_operation(request) @@ -11901,6 +11970,7 @@ def test_delete_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_operation(request) @@ -11931,6 +12001,7 @@ def test_delete_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_operation(request) @@ -11961,6 +12032,7 @@ def test_get_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_operation(request) @@ -11991,6 +12063,7 @@ def test_get_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_operation(request) @@ -12021,6 +12094,7 @@ def test_list_operations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_operations(request) @@ -12051,6 +12125,7 @@ def test_list_operations_rest(request_type): response_value.content = 
json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_operations(request)
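The request/response records added throughout this diff are emitted only when google.api_core.client_logging is importable and the transport's logger is enabled for DEBUG. A minimal sketch of surfacing them with the standard logging module (the handler choice and logger-name scoping shown here are one possible setup, not part of this diff):

import logging

handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)

# The transports log under google.cloud.batch_v1alpha...., so enabling the
# package-level logger covers them.
batch_logger = logging.getLogger("google.cloud.batch_v1alpha")
batch_logger.setLevel(logging.DEBUG)
batch_logger.addHandler(handler)

# The structured httpRequest/httpResponse fields travel as `extra` attributes
# on the log records, so a custom Formatter is needed to render them.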