From ad3911610a5178942c1ebdd5d8280ca70b890231 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 30 Mar 2022 06:02:41 -0400 Subject: [PATCH] chore(python): use black==22.3.0 (#742) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): use black==22.3.0 Source-Link: https://github.com/googleapis/synthtool/commit/6fab84af09f2cf89a031fd8671d1def6b2931b11 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe * chore(python): use black==22.3.0 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 2 +- docs/conf.py | 5 +- google/cloud/storage/_helpers.py | 3 +- google/cloud/storage/_signing.py | 2 +- google/cloud/storage/acl.py | 5 +- google/cloud/storage/blob.py | 12 +- google/cloud/storage/bucket.py | 8 +- google/cloud/storage/client.py | 10 +- google/cloud/storage/hmac_key.py | 21 ++- google/cloud/storage/notification.py | 33 ++-- noxfile.py | 9 +- samples/snippets/noxfile.py | 2 +- tests/perf/storage_pb2_grpc.py | 12 +- tests/system/conftest.py | 4 +- tests/system/test__signing.py | 49 ++++-- tests/system/test_blob.py | 119 +++++++++++---- tests/system/test_bucket.py | 83 +++++++--- tests/system/test_client.py | 17 ++- tests/system/test_fileio.py | 11 +- tests/system/test_kms_integration.py | 21 ++- tests/system/test_notification.py | 11 +- tests/unit/test__helpers.py | 7 +- tests/unit/test__signing.py | 10 +- tests/unit/test_batch.py | 6 +- tests/unit/test_blob.py | 218 +++++++++++++++------ tests/unit/test_bucket.py | 33 +++- tests/unit/test_client.py | 29 +++- tests/unit/test_fileio.py | 22 ++- 28 files changed, 527 insertions(+), 237 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 7e08e05a3..87dd00611 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License.
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5d8da01438ece4021d135433f2cf3227aa39ef0eaccc941d62aa35e6902832ae + digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe diff --git a/docs/conf.py b/docs/conf.py index fc9d1fd34..7a2f13fca 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -361,7 +361,10 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), diff --git a/google/cloud/storage/_helpers.py b/google/cloud/storage/_helpers.py index 5a1c86c48..30866c8a3 100644 --- a/google/cloud/storage/_helpers.py +++ b/google/cloud/storage/_helpers.py @@ -69,7 +69,8 @@ def _get_storage_host(): def _get_environ_project(): return os.getenv( - environment_vars.PROJECT, os.getenv(environment_vars.LEGACY_PROJECT), + environment_vars.PROJECT, + os.getenv(environment_vars.LEGACY_PROJECT), ) diff --git a/google/cloud/storage/_signing.py b/google/cloud/storage/_signing.py index a2b7209bc..837ef6211 100644 --- a/google/cloud/storage/_signing.py +++ b/google/cloud/storage/_signing.py @@ -109,7 +109,7 @@ def get_expiration_seconds_v2(expiration): # If it's a datetime, convert to a timestamp. if isinstance(expiration, datetime.datetime): micros = _helpers._microseconds_from_datetime(expiration) - expiration = micros // 10 ** 6 + expiration = micros // 10**6 if not isinstance(expiration, int): raise TypeError( diff --git a/google/cloud/storage/acl.py b/google/cloud/storage/acl.py index b3b77766f..ef2bca356 100644 --- a/google/cloud/storage/acl.py +++ b/google/cloud/storage/acl.py @@ -460,7 +460,10 @@ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): self.entities.clear() found = client._get_resource( - path, query_params=query_params, timeout=timeout, retry=retry, + path, + query_params=query_params, + timeout=timeout, + retry=retry, ) self.loaded = True diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index caa4a164f..a4e1d402d 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -87,11 +87,11 @@ _API_ACCESS_ENDPOINT = "https://storage.googleapis.com" -_DEFAULT_CONTENT_TYPE = u"application/octet-stream" -_DOWNLOAD_URL_TEMPLATE = u"{hostname}/download/storage/v1{path}?alt=media" -_BASE_UPLOAD_TEMPLATE = u"{hostname}/upload/storage/v1{bucket_path}/o?uploadType=" -_MULTIPART_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u"multipart" -_RESUMABLE_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u"resumable" +_DEFAULT_CONTENT_TYPE = "application/octet-stream" +_DOWNLOAD_URL_TEMPLATE = "{hostname}/download/storage/v1{path}?alt=media" +_BASE_UPLOAD_TEMPLATE = "{hostname}/upload/storage/v1{bucket_path}/o?uploadType=" +_MULTIPART_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + "multipart" +_RESUMABLE_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + "resumable" # NOTE: "acl" is also writeable but we defer ACL management to # the classes in the google.cloud.storage.acl module. 
_CONTENT_TYPE_FIELD = "contentType" @@ -4459,7 +4459,7 @@ def _raise_from_invalid_response(error): else: error_message = str(error) - message = u"{method} {url}: {error}".format( + message = "{method} {url}: {error}".format( method=response.request.method, url=response.request.url, error=error_message ) diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index d071615ef..0fa5894b7 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -1383,7 +1383,10 @@ def list_notifications( client = self._require_client(client) path = self.path + "/notificationConfigs" iterator = client._list_resource( - path, _item_to_notification, timeout=timeout, retry=retry, + path, + _item_to_notification, + timeout=timeout, + retry=retry, ) iterator.bucket = self return iterator @@ -2952,7 +2955,8 @@ def make_public( for blob in blobs: blob.acl.all().grant_read() blob.acl.save( - client=client, timeout=timeout, + client=client, + timeout=timeout, ) def make_private( diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index 4e0c51e70..3b335cf7b 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -1130,7 +1130,9 @@ def download_blob_to_file( headers = _get_encryption_headers(blob_or_uri._encryption_key) headers["accept-encoding"] = "gzip" _add_etag_match_headers( - headers, if_etag_match=if_etag_match, if_etag_not_match=if_etag_not_match, + headers, + if_etag_match=if_etag_match, + if_etag_not_match=if_etag_not_match, ) headers = {**_get_default_headers(self._connection.user_agent), **headers} @@ -1475,7 +1477,11 @@ def create_hmac_key( qs_params["userProject"] = user_project api_response = self._post_resource( - path, None, query_params=qs_params, timeout=timeout, retry=retry, + path, + None, + query_params=qs_params, + timeout=timeout, + retry=retry, ) metadata = HMACKeyMetadata(self) metadata._properties = api_response["metadata"] diff --git a/google/cloud/storage/hmac_key.py b/google/cloud/storage/hmac_key.py index 5cec51fa7..1636aaba4 100644 --- a/google/cloud/storage/hmac_key.py +++ b/google/cloud/storage/hmac_key.py @@ -211,7 +211,10 @@ def exists(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): qs_params["userProject"] = self.user_project self._client._get_resource( - self.path, query_params=qs_params, timeout=timeout, retry=retry, + self.path, + query_params=qs_params, + timeout=timeout, + retry=retry, ) except NotFound: return False @@ -239,7 +242,10 @@ def reload(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): qs_params["userProject"] = self.user_project self._properties = self._client._get_resource( - self.path, query_params=qs_params, timeout=timeout, retry=retry, + self.path, + query_params=qs_params, + timeout=timeout, + retry=retry, ) def update(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_ETAG_IN_JSON): @@ -263,7 +269,11 @@ def update(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_ETAG_IN_JSON): payload = {"state": self.state} self._properties = self._client._put_resource( - self.path, payload, query_params=qs_params, timeout=timeout, retry=retry, + self.path, + payload, + query_params=qs_params, + timeout=timeout, + retry=retry, ) def delete(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): @@ -289,5 +299,8 @@ def delete(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): qs_params["userProject"] = self.user_project self._client._delete_resource( - self.path, query_params=qs_params, timeout=timeout, retry=retry, + self.path, + query_params=qs_params, + 
timeout=timeout, + retry=retry, ) diff --git a/google/cloud/storage/notification.py b/google/cloud/storage/notification.py index 57faea571..0cdb87fa8 100644 --- a/google/cloud/storage/notification.py +++ b/google/cloud/storage/notification.py @@ -156,26 +156,22 @@ def topic_name(self): @property def topic_project(self): - """Project ID of topic to which notifications are published. - """ + """Project ID of topic to which notifications are published.""" return self._topic_project @property def custom_attributes(self): - """Custom attributes passed with notification events. - """ + """Custom attributes passed with notification events.""" return self._properties.get("custom_attributes") @property def event_types(self): - """Event types for which notification events are published. - """ + """Event types for which notification events are published.""" return self._properties.get("event_types") @property def blob_name_prefix(self): - """Prefix of blob names for which notification events are published. - """ + """Prefix of blob names for which notification events are published.""" return self._properties.get("object_name_prefix") @property @@ -278,7 +274,11 @@ def create(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=None): ) self._properties = client._post_resource( - path, properties, query_params=query_params, timeout=timeout, retry=retry, + path, + properties, + query_params=query_params, + timeout=timeout, + retry=retry, ) def exists(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): @@ -318,7 +318,10 @@ def exists(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): try: client._get_resource( - self.path, query_params=query_params, timeout=timeout, retry=retry, + self.path, + query_params=query_params, + timeout=timeout, + retry=retry, ) except NotFound: return False @@ -360,7 +363,10 @@ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): query_params["userProject"] = self.bucket.user_project response = client._get_resource( - self.path, query_params=query_params, timeout=timeout, retry=retry, + self.path, + query_params=query_params, + timeout=timeout, + retry=retry, ) self._set_properties(response) @@ -400,7 +406,10 @@ def delete(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): query_params["userProject"] = self.bucket.user_project client._delete_resource( - self.path, query_params=query_params, timeout=timeout, retry=retry, + self.path, + query_params=query_params, + timeout=timeout, + retry=retry, ) diff --git a/noxfile.py b/noxfile.py index 069a486c1..ac02aa1f5 100644 --- a/noxfile.py +++ b/noxfile.py @@ -24,7 +24,7 @@ import nox -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" @@ -46,7 +46,9 @@ def lint(session): """ session.install("flake8", BLACK_VERSION) session.run( - "black", "--check", *BLACK_PATHS, + "black", + "--check", + *BLACK_PATHS, ) session.run("flake8", "google", "tests") @@ -59,7 +61,8 @@ def blacken(session): """ session.install(BLACK_VERSION) session.run( - "black", *BLACK_PATHS, + "black", + *BLACK_PATHS, ) diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 4c808af73..949e0fde9 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -29,7 +29,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" # Copy 
`noxfile_config.py` to your directory and modify it instead. diff --git a/tests/perf/storage_pb2_grpc.py b/tests/perf/storage_pb2_grpc.py index 1b3a2c82f..913c40558 100644 --- a/tests/perf/storage_pb2_grpc.py +++ b/tests/perf/storage_pb2_grpc.py @@ -11,9 +11,9 @@ class StorageBenchWrapperStub(object): def __init__(self, channel): """Constructor. - Args: - channel: A grpc.Channel. - """ + Args: + channel: A grpc.Channel. + """ self.Write = channel.unary_unary( "/storage_bench.StorageBenchWrapper/Write", request_serializer=storage__pb2.ObjectWrite.SerializeToString, @@ -31,15 +31,13 @@ class StorageBenchWrapperServicer(object): pass def Write(self, request, context): - """Performs an upload from a specific object. - """ + """Performs an upload from a specific object.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def Read(self, request, context): - """Read a specific object. - """ + """Read a specific object.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") diff --git a/tests/system/conftest.py b/tests/system/conftest.py index 02a13d140..c42f62e99 100644 --- a/tests/system/conftest.py +++ b/tests/system/conftest.py @@ -108,7 +108,9 @@ def listable_bucket(storage_client, listable_bucket_name, file_data): for filename in _listable_filenames[1:]: _helpers.retry_bad_copy(bucket.copy_blob)( - source_blob, bucket, filename, + source_blob, + bucket, + filename, ) yield bucket diff --git a/tests/system/test__signing.py b/tests/system/test__signing.py index 04c3687a4..a33f2db4e 100644 --- a/tests/system/test__signing.py +++ b/tests/system/test__signing.py @@ -50,7 +50,9 @@ def _create_signed_list_blobs_url_helper( def test_create_signed_list_blobs_url_v2(storage_client, signing_bucket, no_mtls): _create_signed_list_blobs_url_helper( - storage_client, signing_bucket, version="v2", + storage_client, + signing_bucket, + version="v2", ) @@ -61,13 +63,18 @@ def test_create_signed_list_blobs_url_v2_w_expiration( delta = datetime.timedelta(seconds=10) _create_signed_list_blobs_url_helper( - storage_client, signing_bucket, expiration=now + delta, version="v2", + storage_client, + signing_bucket, + expiration=now + delta, + version="v2", ) def test_create_signed_list_blobs_url_v4(storage_client, signing_bucket, no_mtls): _create_signed_list_blobs_url_helper( - storage_client, signing_bucket, version="v4", + storage_client, + signing_bucket, + version="v4", ) @@ -77,7 +84,10 @@ def test_create_signed_list_blobs_url_v4_w_expiration( now = datetime.datetime.utcnow() delta = datetime.timedelta(seconds=10) _create_signed_list_blobs_url_helper( - storage_client, signing_bucket, expiration=now + delta, version="v4", + storage_client, + signing_bucket, + expiration=now + delta, + version="v4", ) @@ -135,7 +145,9 @@ def test_create_signed_read_url_v2(storage_client, signing_bucket, no_mtls): def test_create_signed_read_url_v4(storage_client, signing_bucket, no_mtls): _create_signed_read_url_helper( - storage_client, signing_bucket, version="v4", + storage_client, + signing_bucket, + version="v4", ) @@ -180,7 +192,7 @@ def test_create_signed_read_url_v2_w_non_ascii_name( _create_signed_read_url_helper( storage_client, signing_bucket, - blob_name=u"Caf\xe9.txt", + blob_name="Caf\xe9.txt", payload=b"Test signed URL for blob w/ non-ASCII name", ) @@ -191,7 +203,7 @@ def test_create_signed_read_url_v4_w_non_ascii_name( 
_create_signed_read_url_helper( storage_client, signing_bucket, - blob_name=u"Caf\xe9.txt", + blob_name="Caf\xe9.txt", payload=b"Test signed URL for blob w/ non-ASCII name", version="v4", ) @@ -276,7 +288,10 @@ def _create_signed_delete_url_helper(client, bucket, version="v2", expiration=No blob.upload_from_string(b"DELETE ME!") signed_delete_url = blob.generate_signed_url( - expiration=expiration, method="DELETE", client=client, version=version, + expiration=expiration, + method="DELETE", + client=client, + version=version, ) response = requests.request("DELETE", signed_delete_url) @@ -303,7 +318,10 @@ def _create_signed_resumable_upload_url_helper( # Initiate the upload using a signed URL. signed_resumable_upload_url = blob.generate_signed_url( - expiration=expiration, method="RESUMABLE", client=client, version=version, + expiration=expiration, + method="RESUMABLE", + client=client, + version=version, ) post_headers = {"x-goog-resumable": "start"} @@ -327,7 +345,10 @@ def _create_signed_resumable_upload_url_helper( # Finally, delete the blob using a signed URL. signed_delete_url = blob.generate_signed_url( - expiration=expiration, method="DELETE", client=client, version=version, + expiration=expiration, + method="DELETE", + client=client, + version=version, ) delete_response = requests.delete(signed_delete_url) @@ -336,13 +357,17 @@ def _create_signed_resumable_upload_url_helper( def test_create_signed_resumable_upload_url_v2(storage_client, signing_bucket, no_mtls): _create_signed_resumable_upload_url_helper( - storage_client, signing_bucket, version="v2", + storage_client, + signing_bucket, + version="v2", ) def test_create_signed_resumable_upload_url_v4(storage_client, signing_bucket, no_mtls): _create_signed_resumable_upload_url_helper( - storage_client, signing_bucket, version="v4", + storage_client, + signing_bucket, + version="v4", ) diff --git a/tests/system/test_blob.py b/tests/system/test_blob.py index b6d5216a7..acbc5745f 100644 --- a/tests/system/test_blob.py +++ b/tests/system/test_blob.py @@ -39,7 +39,10 @@ def _check_blob_hash(blob, info): def test_large_file_write_from_stream( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): blob = shared_bucket.blob("LargeFile") @@ -52,7 +55,10 @@ def test_large_file_write_from_stream( def test_large_file_write_from_stream_w_checksum( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): blob = shared_bucket.blob("LargeFile") @@ -65,7 +71,10 @@ def test_large_file_write_from_stream_w_checksum( def test_large_file_write_from_stream_w_failed_checksum( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): blob = shared_bucket.blob("LargeFile") @@ -88,7 +97,11 @@ def test_large_file_write_from_stream_w_failed_checksum( def test_large_file_write_from_stream_w_encryption_key( - storage_client, shared_bucket, blobs_to_delete, file_data, service_account, + storage_client, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): blob = shared_bucket.blob("LargeFile", encryption_key=encryption_key) @@ -110,7 +123,10 @@ def test_large_file_write_from_stream_w_encryption_key( def test_small_file_write_from_filename( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): blob = shared_bucket.blob("SmallFile") @@ -122,7 
+138,10 @@ def test_small_file_write_from_filename( def test_small_file_write_from_filename_with_checksum( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): blob = shared_bucket.blob("SmallFile") @@ -134,7 +153,10 @@ def test_small_file_write_from_filename_with_checksum( def test_small_file_write_from_filename_with_failed_checksum( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): blob = shared_bucket.blob("SmallFile") @@ -235,7 +257,10 @@ def test_blob_crud_w_user_project( def test_blob_crud_w_etag_match( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): wrong_etag = "kittens" @@ -281,7 +306,10 @@ def test_blob_crud_w_etag_match( def test_blob_crud_w_generation_match( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): wrong_generation_number = 6 wrong_metageneration_number = 9 @@ -373,7 +401,10 @@ def test_blob_acl_w_user_project( def test_blob_acl_w_metageneration_match( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): wrong_metageneration_number = 9 wrong_generation_number = 6 @@ -408,7 +439,10 @@ def test_blob_acl_w_metageneration_match( def test_blob_acl_upload_predefined( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): control = shared_bucket.blob("logo") control_info = file_data["logo"] @@ -438,7 +472,10 @@ def test_blob_acl_upload_predefined( def test_blob_patch_metadata( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): filename = file_data["logo"]["path"] blob_name = os.path.basename(filename) @@ -467,7 +504,9 @@ def test_blob_patch_metadata( def test_blob_direct_write_and_read_into_file( - shared_bucket, blobs_to_delete, service_account, + shared_bucket, + blobs_to_delete, + service_account, ): payload = b"Hello World" blob = shared_bucket.blob("MyBuffer") @@ -489,7 +528,9 @@ def test_blob_direct_write_and_read_into_file( def test_blob_download_w_generation_match( - shared_bucket, blobs_to_delete, service_account, + shared_bucket, + blobs_to_delete, + service_account, ): wrong_generation_number = 6 @@ -522,7 +563,9 @@ def test_blob_download_w_generation_match( def test_blob_download_w_failed_crc32c_checksum( - shared_bucket, blobs_to_delete, service_account, + shared_bucket, + blobs_to_delete, + service_account, ): blob = shared_bucket.blob("FailedChecksumBlob") payload = b"Hello World" @@ -555,7 +598,9 @@ def test_blob_download_w_failed_crc32c_checksum( def test_blob_download_as_text( - shared_bucket, blobs_to_delete, service_account, + shared_bucket, + blobs_to_delete, + service_account, ): blob = shared_bucket.blob("MyBuffer") payload = "Hello World" @@ -571,7 +616,9 @@ def test_blob_download_as_text( def test_blob_upload_w_gzip_encoded_download_raw( - shared_bucket, blobs_to_delete, service_account, + shared_bucket, + blobs_to_delete, + service_account, ): payload = b"DEADBEEF" * 1000 raw_stream = io.BytesIO() @@ -592,7 +639,10 @@ def test_blob_upload_w_gzip_encoded_download_raw( def test_blob_upload_from_file_resumable_with_generation( - shared_bucket, blobs_to_delete, file_data, service_account, + 
shared_bucket, + blobs_to_delete, + file_data, + service_account, ): blob = shared_bucket.blob("LargeFile") wrong_generation = 3 @@ -616,18 +666,23 @@ def test_blob_upload_from_file_resumable_with_generation( with pytest.raises(exceptions.PreconditionFailed): with open(info["path"], "rb") as file_obj: blob.upload_from_file( - file_obj, if_generation_match=wrong_generation, + file_obj, + if_generation_match=wrong_generation, ) with pytest.raises(exceptions.PreconditionFailed): with open(info["path"], "rb") as file_obj: blob.upload_from_file( - file_obj, if_metageneration_match=wrong_meta_generation, + file_obj, + if_metageneration_match=wrong_meta_generation, ) def test_blob_upload_from_string_w_owner( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): blob = shared_bucket.blob("MyBuffer") payload = b"Hello World" @@ -642,7 +697,10 @@ def test_blob_upload_from_string_w_owner( def test_blob_upload_from_string_w_custom_time( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): blob = shared_bucket.blob("CustomTimeBlob") payload = b"Hello World" @@ -658,7 +716,10 @@ def test_blob_upload_from_string_w_custom_time( def test_blob_upload_from_string_w_custom_time_no_micros( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): # Test that timestamps without microseconds are treated correctly by # custom_time encoding/decoding. @@ -676,7 +737,10 @@ def test_blob_upload_from_string_w_custom_time_no_micros( def test_blob_upload_download_crc32_md5_hash( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): blob = shared_bucket.blob("MyBuffer") payload = b"Hello World" @@ -693,8 +757,8 @@ def test_blob_upload_download_crc32_md5_hash( @pytest.mark.parametrize( "blob_name,payload", [ - (u"Caf\u00e9", b"Normalization Form C"), - (u"Cafe\u0301", b"Normalization Form D"), + ("Caf\u00e9", b"Normalization Form C"), + ("Cafe\u0301", b"Normalization Form D"), ], ) def test_blob_w_unicode_names(blob_name, payload, shared_bucket, blobs_to_delete): @@ -841,7 +905,8 @@ def test_blob_compose_w_source_generation_match(shared_bucket, blobs_to_delete): with pytest.raises(exceptions.PreconditionFailed): original.compose( - [original, to_append], if_source_generation_match=wrong_source_generations, + [original, to_append], + if_source_generation_match=wrong_source_generations, ) original.compose( diff --git a/tests/system/test_bucket.py b/tests/system/test_bucket.py index dc1869d2f..4826ce8a6 100644 --- a/tests/system/test_bucket.py +++ b/tests/system/test_bucket.py @@ -117,7 +117,9 @@ def test_bucket_update_labels(storage_client, buckets_to_delete): def test_bucket_get_set_iam_policy( - storage_client, buckets_to_delete, service_account, + storage_client, + buckets_to_delete, + service_account, ): from google.cloud.storage.iam import STORAGE_OBJECT_VIEWER_ROLE from google.api_core.exceptions import BadRequest @@ -177,7 +179,10 @@ def test_bucket_crud_w_requester_pays(storage_client, buckets_to_delete, user_pr assert created.name == bucket_name assert created.requester_pays - with_user_project = storage_client.bucket(bucket_name, user_project=user_project,) + with_user_project = storage_client.bucket( + bucket_name, + user_project=user_project, + ) try: # Exercise 'buckets.get' w/ userProject. 
@@ -210,7 +215,8 @@ def test_bucket_acls_iam_w_user_project( ): bucket_name = _helpers.unique_name("acl-w-user-project") created = _helpers.retry_429_503(storage_client.create_bucket)( - bucket_name, requester_pays=True, + bucket_name, + requester_pays=True, ) buckets_to_delete.append(created) @@ -282,7 +288,10 @@ def test_bucket_acls_w_metageneration_match(storage_client, buckets_to_delete): def test_bucket_copy_blob( - storage_client, buckets_to_delete, blobs_to_delete, user_project, + storage_client, + buckets_to_delete, + blobs_to_delete, + user_project, ): payload = b"DEADBEEF" bucket_name = _helpers.unique_name("copy-blob") @@ -304,7 +313,10 @@ def test_bucket_copy_blob( def test_bucket_copy_blob_w_user_project( - storage_client, buckets_to_delete, blobs_to_delete, user_project, + storage_client, + buckets_to_delete, + blobs_to_delete, + user_project, ): payload = b"DEADBEEF" bucket_name = _helpers.unique_name("copy-w-requester-pays") @@ -330,7 +342,9 @@ def test_bucket_copy_blob_w_user_project( def test_bucket_copy_blob_w_generation_match( - storage_client, buckets_to_delete, blobs_to_delete, + storage_client, + buckets_to_delete, + blobs_to_delete, ): payload = b"DEADBEEF" bucket_name = _helpers.unique_name("generation-match") @@ -345,7 +359,10 @@ def test_bucket_copy_blob_w_generation_match( dest_bucket = storage_client.bucket(bucket_name) new_blob = dest_bucket.copy_blob( - blob, dest_bucket, "simple-copy", if_source_generation_match=blob.generation, + blob, + dest_bucket, + "simple-copy", + if_source_generation_match=blob.generation, ) blobs_to_delete.append(new_blob) @@ -353,7 +370,9 @@ def test_bucket_copy_blob_w_generation_match( def test_bucket_copy_blob_w_metageneration_match( - storage_client, buckets_to_delete, blobs_to_delete, + storage_client, + buckets_to_delete, + blobs_to_delete, ): payload = b"DEADBEEF" bucket_name = _helpers.unique_name("generation-match") @@ -381,7 +400,10 @@ def test_bucket_copy_blob_w_metageneration_match( def test_bucket_get_blob_with_user_project( - storage_client, buckets_to_delete, blobs_to_delete, user_project, + storage_client, + buckets_to_delete, + blobs_to_delete, + user_project, ): blob_name = "blob-name" payload = b"DEADBEEF" @@ -413,7 +435,10 @@ def test_bucket_list_blobs(listable_bucket, listable_filenames): @_helpers.retry_failures def test_bucket_list_blobs_w_user_project( - storage_client, listable_bucket, listable_filenames, user_project, + storage_client, + listable_bucket, + listable_filenames, + user_project, ): with_user_project = storage_client.bucket( listable_bucket.name, user_project=user_project @@ -545,7 +570,8 @@ def test_bucket_list_blobs_hierarchy_third_level(hierarchy_bucket, hierarchy_fil @_helpers.retry_failures def test_bucket_list_blobs_hierarchy_w_include_trailing_delimiter( - hierarchy_bucket, hierarchy_filenames, + hierarchy_bucket, + hierarchy_filenames, ): expected_names = ["file01.txt", "parent/"] expected_prefixes = set(["parent/"]) @@ -562,7 +588,9 @@ def test_bucket_list_blobs_hierarchy_w_include_trailing_delimiter( def test_bucket_w_retention_period( - storage_client, buckets_to_delete, blobs_to_delete, + storage_client, + buckets_to_delete, + blobs_to_delete, ): period_secs = 10 bucket_name = _helpers.unique_name("w-retention-period") @@ -613,7 +641,9 @@ def test_bucket_w_retention_period( def test_bucket_w_default_event_based_hold( - storage_client, buckets_to_delete, blobs_to_delete, + storage_client, + buckets_to_delete, + blobs_to_delete, ): bucket_name = _helpers.unique_name("w-def-ebh") bucket 
= _helpers.retry_429_503(storage_client.create_bucket)(bucket_name) @@ -670,7 +700,9 @@ def test_bucket_w_default_event_based_hold( def test_blob_w_temporary_hold( - storage_client, buckets_to_delete, blobs_to_delete, + storage_client, + buckets_to_delete, + blobs_to_delete, ): bucket_name = _helpers.unique_name("w-tmp-hold") bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket_name) @@ -702,7 +734,8 @@ def test_blob_w_temporary_hold( def test_bucket_lock_retention_policy( - storage_client, buckets_to_delete, + storage_client, + buckets_to_delete, ): period_secs = 10 bucket_name = _helpers.unique_name("loc-ret-policy") @@ -728,7 +761,9 @@ def test_bucket_lock_retention_policy( def test_new_bucket_w_ubla( - storage_client, buckets_to_delete, blobs_to_delete, + storage_client, + buckets_to_delete, + blobs_to_delete, ): bucket_name = _helpers.unique_name("new-w-ubla") bucket = storage_client.bucket(bucket_name) @@ -765,7 +800,9 @@ def test_new_bucket_w_ubla( def test_ubla_set_unset_preserves_acls( - storage_client, buckets_to_delete, blobs_to_delete, + storage_client, + buckets_to_delete, + blobs_to_delete, ): bucket_name = _helpers.unique_name("ubla-acls") bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket_name) @@ -806,7 +843,9 @@ def test_ubla_set_unset_preserves_acls( def test_new_bucket_created_w_inherited_pap( - storage_client, buckets_to_delete, blobs_to_delete, + storage_client, + buckets_to_delete, + blobs_to_delete, ): from google.cloud.storage import constants @@ -857,7 +896,9 @@ def test_new_bucket_created_w_inherited_pap( @pytest.mark.skip(reason="Unspecified PAP is changing to inherited") def test_new_bucket_created_w_enforced_pap( - storage_client, buckets_to_delete, blobs_to_delete, + storage_client, + buckets_to_delete, + blobs_to_delete, ): from google.cloud.storage import constants @@ -888,7 +929,9 @@ def test_new_bucket_created_w_enforced_pap( def test_new_bucket_with_rpo( - storage_client, buckets_to_delete, blobs_to_delete, + storage_client, + buckets_to_delete, + blobs_to_delete, ): from google.cloud.storage import constants diff --git a/tests/system/test_client.py b/tests/system/test_client.py index f531f4bb4..3329ee7a3 100644 --- a/tests/system/test_client.py +++ b/tests/system/test_client.py @@ -33,7 +33,8 @@ def test_anonymous_client_access_to_public_bucket(): anonymous_client = Client.create_anonymous_client() bucket = anonymous_client.bucket(public_bucket) (blob,) = _helpers.retry_429_503(anonymous_client.list_blobs)( - bucket, max_results=1, + bucket, + max_results=1, ) with tempfile.TemporaryFile() as stream: _helpers.retry_429_503(blob.download_to_file)(stream) @@ -85,7 +86,10 @@ def test_list_buckets(storage_client, buckets_to_delete): def test_download_blob_to_file_w_uri( - storage_client, shared_bucket, blobs_to_delete, service_account, + storage_client, + shared_bucket, + blobs_to_delete, + service_account, ): blob = shared_bucket.blob("MyBuffer") payload = b"Hello World" @@ -106,7 +110,10 @@ def test_download_blob_to_file_w_uri( def test_download_blob_to_file_w_etag( - storage_client, shared_bucket, blobs_to_delete, service_account, + storage_client, + shared_bucket, + blobs_to_delete, + service_account, ): filename = "kittens" blob = shared_bucket.blob(filename) @@ -140,6 +147,8 @@ def test_download_blob_to_file_w_etag( buffer = io.BytesIO() storage_client.download_blob_to_file( - "gs://" + shared_bucket.name + "/" + filename, buffer, if_etag_match=blob.etag, + "gs://" + shared_bucket.name + "/" + filename, + buffer, + 
if_etag_match=blob.etag, ) assert buffer.getvalue() == payload diff --git a/tests/system/test_fileio.py b/tests/system/test_fileio.py index 79bf0c1eb..79e0ab7da 100644 --- a/tests/system/test_fileio.py +++ b/tests/system/test_fileio.py @@ -18,7 +18,10 @@ def test_blobwriter_and_blobreader( - shared_bucket, blobs_to_delete, file_data, service_account, + shared_bucket, + blobs_to_delete, + file_data, + service_account, ): blob = shared_bucket.blob("LargeFile") @@ -49,12 +52,14 @@ def test_blobwriter_and_blobreader( def test_blobwriter_and_blobreader_text_mode( - shared_bucket, blobs_to_delete, service_account, + shared_bucket, + blobs_to_delete, + service_account, ): blob = shared_bucket.blob("MultibyteTextFile") # Construct a multibyte text_data sample file. - base_multibyte_text_string = u"abcde あいうえお line: " + base_multibyte_text_string = "abcde あいうえお line: " text_data = "\n".join([base_multibyte_text_string + str(x) for x in range(100)]) # Test text BlobWriter works. diff --git a/tests/system/test_kms_integration.py b/tests/system/test_kms_integration.py index 67dc5351f..9636acd54 100644 --- a/tests/system/test_kms_integration.py +++ b/tests/system/test_kms_integration.py @@ -28,7 +28,10 @@ def _kms_key_name(client, bucket, key_name): return _key_name_format.format( - client.project, bucket.location.lower(), keyring_name, key_name, + client.project, + bucket.location.lower(), + keyring_name, + key_name, ) @@ -127,7 +130,11 @@ def test_blob_w_explicit_kms_key_name( @_helpers.retry_failures def test_bucket_w_default_kms_key_name( - kms_bucket, blobs_to_delete, kms_key_name, alt_kms_key_name, file_data, + kms_bucket, + blobs_to_delete, + kms_key_name, + alt_kms_key_name, + file_data, ): blob_name = "default-kms-key-name" override_blob_name = "override-default-kms-key-name" @@ -183,7 +190,10 @@ def test_bucket_w_default_kms_key_name( def test_blob_rewrite_rotate_csek_to_cmek( - kms_bucket, blobs_to_delete, kms_key_name, file_data, + kms_bucket, + blobs_to_delete, + kms_key_name, + file_data, ): blob_name = "rotating-keys" source_key = os.urandom(32) @@ -216,7 +226,10 @@ def test_blob_rewrite_rotate_csek_to_cmek( def test_blob_upload_w_bucket_cmek_enabled( - kms_bucket, blobs_to_delete, kms_key_name, file_data, + kms_bucket, + blobs_to_delete, + kms_key_name, + file_data, ): blob_name = "test-blob" payload = b"DEADBEEF" diff --git a/tests/system/test_notification.py b/tests/system/test_notification.py index 6c49064aa..59d0dfafd 100644 --- a/tests/system/test_notification.py +++ b/tests/system/test_notification.py @@ -70,7 +70,10 @@ def notification_topic(storage_client, publisher_client, topic_path, no_mtls): def test_notification_create_minimal( - storage_client, buckets_to_delete, topic_name, notification_topic, + storage_client, + buckets_to_delete, + topic_name, + notification_topic, ): bucket_name = _helpers.unique_name("notification-minimal") bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket_name) @@ -126,7 +129,11 @@ def test_notification_create_explicit( def test_notification_create_w_user_project( - storage_client, buckets_to_delete, topic_name, notification_topic, user_project, + storage_client, + buckets_to_delete, + topic_name, + notification_topic, + user_project, ): bucket_name = _helpers.unique_name("notification-w-up") bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket_name) diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py index 1b0a033dc..6c8770576 100644 --- a/tests/unit/test__helpers.py +++
b/tests/unit/test__helpers.py @@ -176,7 +176,9 @@ def test_reload_w_etag_match(self): derived._changes = object() derived.client = client - derived.reload(if_etag_match=etag,) + derived.reload( + if_etag_match=etag, + ) self.assertEqual(derived._properties, response) self.assertEqual(derived._changes, set()) @@ -453,7 +455,8 @@ def test_update_with_metageneration_not_match_w_timeout_w_retry(self): timeout = 42 derived.update( - if_metageneration_not_match=generation_number, timeout=timeout, + if_metageneration_not_match=generation_number, + timeout=timeout, ) self.assertEqual(derived._properties, {"foo": "Foo"}) diff --git a/tests/unit/test__signing.py b/tests/unit/test__signing.py index f863460c5..48c9a00e1 100644 --- a/tests/unit/test__signing.py +++ b/tests/unit/test__signing.py @@ -678,7 +678,9 @@ def test_with_signer_email(self): credentials = _make_credentials(signer_email=signer_email) credentials.sign_bytes.return_value = b"DEADBEEF" self._call_fut( - credentials, resource=resource, expiration=datetime.timedelta(days=5), + credentials, + resource=resource, + expiration=datetime.timedelta(days=5), ) def test_with_service_account_email_and_signer_email(self): @@ -873,8 +875,10 @@ def test_conformance_blob(test_data): # For the VIRTUAL_HOSTED_STYLE else: - _API_ACCESS_ENDPOINT = "{scheme}://{bucket_name}.storage.googleapis.com".format( - scheme=test_data["scheme"], bucket_name=test_data["bucket"] + _API_ACCESS_ENDPOINT = ( + "{scheme}://{bucket_name}.storage.googleapis.com".format( + scheme=test_data["scheme"], bucket_name=test_data["bucket"] + ) ) resource = "/{}".format(test_data["object"]) _run_conformance_test(resource, test_data, _API_ACCESS_ENDPOINT) diff --git a/tests/unit/test_batch.py b/tests/unit/test_batch.py index 89bf583e9..8b347fcf8 100644 --- a/tests/unit/test_batch.py +++ b/tests/unit/test_batch.py @@ -577,9 +577,9 @@ def _unpack_helper(self, response, content): self.assertEqual(len(result), 3) self.assertEqual(result[0].status_code, http.client.OK) - self.assertEqual(result[0].json(), {u"bar": 2, u"foo": 1}) + self.assertEqual(result[0].json(), {"bar": 2, "foo": 1}) self.assertEqual(result[1].status_code, http.client.OK) - self.assertEqual(result[1].json(), {u"foo": 1, u"bar": 3}) + self.assertEqual(result[1].json(), {"foo": 1, "bar": 3}) self.assertEqual(result[2].status_code, http.client.NO_CONTENT) def test_bytes_headers(self): @@ -588,7 +588,7 @@ def test_bytes_headers(self): self._unpack_helper(RESPONSE, CONTENT) def test_unicode_headers(self): - RESPONSE = {"content-type": u'multipart/mixed; boundary="DEADBEEF="'} + RESPONSE = {"content-type": 'multipart/mixed; boundary="DEADBEEF="'} CONTENT = _THREE_PART_MIME_RESPONSE self._unpack_helper(RESPONSE, CONTENT) diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index ba1b38752..f48b4a1e2 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -79,7 +79,7 @@ def test_ctor_wo_encryption_key(self): def test_ctor_with_encoded_unicode(self): blob_name = b"wet \xe2\x9b\xb5" blob = self._make_one(blob_name, bucket=None) - unicode_name = u"wet \N{sailboat}" + unicode_name = "wet \N{sailboat}" self.assertNotIsInstance(blob.name, bytes) self.assertIsInstance(blob.name, str) self.assertEqual(blob.name, unicode_name) @@ -262,7 +262,7 @@ def test_acl_property(self): from google.cloud.storage.acl import ObjectACL fake_bucket = _Bucket() - blob = self._make_one(u"name", bucket=fake_bucket) + blob = self._make_one("name", bucket=fake_bucket) acl = blob.acl self.assertIsInstance(acl, ObjectACL) 
self.assertIs(acl, blob._acl) @@ -305,13 +305,13 @@ def test_kms_key_name_setter(self): def test_path_bad_bucket(self): fake_bucket = object() - name = u"blob-name" + name = "blob-name" blob = self._make_one(name, bucket=fake_bucket) self.assertRaises(AttributeError, getattr, blob, "path") def test_path_no_name(self): bucket = _Bucket() - blob = self._make_one(u"", bucket=bucket) + blob = self._make_one("", bucket=bucket) self.assertRaises(ValueError, getattr, blob, "path") def test_path_normal(self): @@ -327,7 +327,7 @@ def test_path_w_slash_in_name(self): self.assertEqual(blob.path, "/b/name/o/parent%2Fchild") def test_path_with_non_ascii(self): - blob_name = u"Caf\xe9" + blob_name = "Caf\xe9" bucket = _Bucket() blob = self._make_one(blob_name, bucket=bucket) self.assertEqual(blob.path, "/b/name/o/Caf%C3%A9") @@ -417,7 +417,7 @@ def test_public_url_w_tilde_in_name(self): self.assertEqual(blob.public_url, "https://storage.googleapis.com/name/foo~bar") def test_public_url_with_non_ascii(self): - blob_name = u"winter \N{snowman}" + blob_name = "winter \N{snowman}" bucket = _Bucket() blob = self._make_one(blob_name, bucket=bucket) expected_url = "https://storage.googleapis.com/name/winter%20%E2%98%83" @@ -570,7 +570,7 @@ def test_generate_signed_url_v2_w_expiration(self): self._generate_signed_url_v2_helper(expiration=expiration) def test_generate_signed_url_v2_w_non_ascii_name(self): - BLOB_NAME = u"\u0410\u043a\u043a\u043e\u0440\u0434\u044b.txt" + BLOB_NAME = "\u0410\u043a\u043a\u043e\u0440\u0434\u044b.txt" self._generate_signed_url_v2_helper(blob_name=BLOB_NAME) def test_generate_signed_url_v2_w_slash_in_name(self): @@ -630,7 +630,7 @@ def test_generate_signed_url_v4_w_defaults(self): self._generate_signed_url_v4_helper() def test_generate_signed_url_v4_w_non_ascii_name(self): - BLOB_NAME = u"\u0410\u043a\u043a\u043e\u0440\u0434\u044b.txt" + BLOB_NAME = "\u0410\u043a\u043a\u043e\u0440\u0434\u044b.txt" self._generate_signed_url_v4_helper(blob_name=BLOB_NAME) def test_generate_signed_url_v4_w_slash_in_name(self): @@ -770,7 +770,12 @@ def test_exists_w_etag_match(self): bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) - self.assertTrue(blob.exists(if_etag_match=etag, retry=None,)) + self.assertTrue( + blob.exists( + if_etag_match=etag, + retry=None, + ) + ) expected_query_params = { "fields": "name", @@ -901,9 +906,9 @@ def test_delete_w_generation_match(self): ) def test__get_transport(self): - client = mock.Mock(spec=[u"_credentials", "_http"]) + client = mock.Mock(spec=["_credentials", "_http"]) client._http = mock.sentinel.transport - blob = self._make_one(u"blob-name", bucket=None) + blob = self._make_one("blob-name", bucket=None) transport = blob._get_transport(client) @@ -1896,7 +1901,7 @@ def _download_as_text_helper( encoding=None, charset=None, no_charset=False, - expected_value=u"DEADBEEF", + expected_value="DEADBEEF", payload=None, **extra_kwargs ): @@ -2002,22 +2007,26 @@ def test_download_as_text_w_custom_timeout(self): def test_download_as_text_w_if_etag_match_str(self): self._download_as_text_helper( - raw_download=False, if_etag_match="kittens", + raw_download=False, + if_etag_match="kittens", ) def test_download_as_text_w_if_etag_match_list(self): self._download_as_text_helper( - raw_download=False, if_etag_match=["kittens", "fluffy"], + raw_download=False, + if_etag_match=["kittens", "fluffy"], ) def test_download_as_text_w_if_etag_not_match_str(self): self._download_as_text_helper( - raw_download=False, if_etag_not_match="kittens", + 
raw_download=False, + if_etag_not_match="kittens", ) def test_download_as_text_w_if_etag_not_match_list(self): self._download_as_text_helper( - raw_download=False, if_etag_not_match=["kittens", "fluffy"], + raw_download=False, + if_etag_not_match=["kittens", "fluffy"], ) def test_download_as_text_w_if_generation_match(self): @@ -2035,16 +2044,18 @@ def test_download_as_text_w_if_metageneration_not_match(self): def test_download_as_text_w_encoding(self): encoding = "utf-16" self._download_as_text_helper( - raw_download=False, encoding=encoding, + raw_download=False, + encoding=encoding, ) def test_download_as_text_w_no_charset(self): self._download_as_text_helper( - raw_download=False, no_charset=True, + raw_download=False, + no_charset=True, ) def test_download_as_text_w_non_ascii_w_explicit_encoding(self): - expected_value = u"\x0AFe" + expected_value = "\x0AFe" encoding = "utf-16" charset = "latin1" payload = expected_value.encode(encoding) @@ -2057,7 +2068,7 @@ def test_download_as_text_w_non_ascii_w_explicit_encoding(self): ) def test_download_as_text_w_non_ascii_wo_explicit_encoding_w_charset(self): - expected_value = u"\x0AFe" + expected_value = "\x0AFe" charset = "utf-16" payload = expected_value.encode(charset) self._download_as_text_helper( @@ -2100,7 +2111,9 @@ def test_download_as_string(self, mock_warn): ) mock_warn.assert_called_once_with( - _DOWNLOAD_AS_STRING_DEPRECATED, PendingDeprecationWarning, stacklevel=2, + _DOWNLOAD_AS_STRING_DEPRECATED, + PendingDeprecationWarning, + stacklevel=2, ) @mock.patch("warnings.warn") @@ -2136,37 +2149,39 @@ def test_download_as_string_no_retry(self, mock_warn): ) mock_warn.assert_called_once_with( - _DOWNLOAD_AS_STRING_DEPRECATED, PendingDeprecationWarning, stacklevel=2, + _DOWNLOAD_AS_STRING_DEPRECATED, + PendingDeprecationWarning, + stacklevel=2, ) def test__get_content_type_explicit(self): - blob = self._make_one(u"blob-name", bucket=None) + blob = self._make_one("blob-name", bucket=None) - content_type = u"text/plain" + content_type = "text/plain" return_value = blob._get_content_type(content_type) self.assertEqual(return_value, content_type) def test__get_content_type_from_blob(self): - blob = self._make_one(u"blob-name", bucket=None) - blob.content_type = u"video/mp4" + blob = self._make_one("blob-name", bucket=None) + blob.content_type = "video/mp4" return_value = blob._get_content_type(None) self.assertEqual(return_value, blob.content_type) def test__get_content_type_from_filename(self): - blob = self._make_one(u"blob-name", bucket=None) + blob = self._make_one("blob-name", bucket=None) return_value = blob._get_content_type(None, filename="archive.tar") self.assertEqual(return_value, "application/x-tar") def test__get_content_type_default(self): - blob = self._make_one(u"blob-name", bucket=None) + blob = self._make_one("blob-name", bucket=None) return_value = blob._get_content_type(None) - self.assertEqual(return_value, u"application/octet-stream") + self.assertEqual(return_value, "application/octet-stream") def test__get_writable_metadata_no_changes(self): - name = u"blob-name" + name = "blob-name" blob = self._make_one(name, bucket=None) object_metadata = blob._get_writable_metadata() @@ -2174,7 +2189,7 @@ def test__get_writable_metadata_no_changes(self): self.assertEqual(object_metadata, expected) def test__get_writable_metadata_with_changes(self): - name = u"blob-name" + name = "blob-name" blob = self._make_one(name, bucket=None) blob.storage_class = "NEARLINE" blob.cache_control = "max-age=3600" @@ -2190,7 +2205,7 @@ def 
test__get_writable_metadata_with_changes(self): self.assertEqual(object_metadata, expected) def test__get_writable_metadata_unwritable_field(self): - name = u"blob-name" + name = "blob-name" properties = {"updated": "2016-10-16T18:18:18.181Z"} blob = self._make_one(name, bucket=None, properties=properties) # Fake that `updated` is in changes. @@ -2201,7 +2216,7 @@ def test__get_writable_metadata_unwritable_field(self): self.assertEqual(object_metadata, expected) def test__set_metadata_to_none(self): - name = u"blob-name" + name = "blob-name" blob = self._make_one(name, bucket=None) blob.storage_class = "NEARLINE" blob.cache_control = "max-age=3600" @@ -2211,14 +2226,14 @@ def test__set_metadata_to_none(self): patch_prop.assert_called_once_with("metadata", None) def test__get_upload_arguments(self): - name = u"blob-name" + name = "blob-name" key = b"[pXw@,p@@AfBfrR3x-2b2SCHR,.?YwRO" client = mock.Mock(_connection=_Connection) client._connection.user_agent = "testing 1.2.3" blob = self._make_one(name, bucket=None, encryption_key=key) blob.content_disposition = "inline" - content_type = u"image/jpeg" + content_type = "image/jpeg" info = blob._get_upload_arguments(client, content_type) headers, object_metadata, new_content_type = info @@ -2265,7 +2280,7 @@ def _do_multipart_success( retry=None, ): bucket = _Bucket(name="w00t", user_project=user_project) - blob = self._make_one(u"blob-name", bucket=bucket, kms_key_name=kms_key_name) + blob = self._make_one("blob-name", bucket=bucket, kms_key_name=kms_key_name) self.assertIsNone(blob.chunk_size) if metadata: self.assertIsNone(blob.metadata) @@ -2289,7 +2304,7 @@ def _do_multipart_success( data = b"data here hear hier" stream = io.BytesIO(data) - content_type = u"application/xml" + content_type = "application/xml" if timeout is None: expected_timeout = self._get_default_timeout() @@ -2381,23 +2396,23 @@ def _do_multipart_success( "POST", upload_url, data=payload, headers=headers, timeout=expected_timeout ) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_no_size(self, mock_get_boundary): self._do_multipart_success(mock_get_boundary, predefined_acl="private") - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_no_size_retry(self, mock_get_boundary): self._do_multipart_success( mock_get_boundary, predefined_acl="private", retry=DEFAULT_RETRY ) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_no_size_num_retries(self, mock_get_boundary): self._do_multipart_success( mock_get_boundary, predefined_acl="private", num_retries=2 ) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_no_size_retry_conflict(self, mock_get_boundary): with self.assertRaises(ValueError): self._do_multipart_success( @@ -2407,22 +2422,22 @@ def test__do_multipart_upload_no_size_retry_conflict(self, mock_get_boundary): retry=DEFAULT_RETRY, ) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", 
return_value=b"==0==") def test__do_multipart_upload_no_size_mtls(self, mock_get_boundary): self._do_multipart_success( mock_get_boundary, predefined_acl="private", mtls=True ) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_size(self, mock_get_boundary): self._do_multipart_success(mock_get_boundary, size=10) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_user_project(self, mock_get_boundary): user_project = "user-project-123" self._do_multipart_success(mock_get_boundary, user_project=user_project) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_kms(self, mock_get_boundary): kms_resource = ( "projects/test-project-123/" @@ -2432,7 +2447,7 @@ def test__do_multipart_upload_with_kms(self, mock_get_boundary): ) self._do_multipart_success(mock_get_boundary, kms_key_name=kms_resource) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_kms_with_version(self, mock_get_boundary): kms_resource = ( "projects/test-project-123/" @@ -2443,39 +2458,39 @@ def test__do_multipart_upload_with_kms_with_version(self, mock_get_boundary): ) self._do_multipart_success(mock_get_boundary, kms_key_name=kms_resource) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_retry(self, mock_get_boundary): self._do_multipart_success(mock_get_boundary, retry=DEFAULT_RETRY) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_generation_match(self, mock_get_boundary): self._do_multipart_success( mock_get_boundary, if_generation_match=4, if_metageneration_match=4 ) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_custom_timeout(self, mock_get_boundary): self._do_multipart_success(mock_get_boundary, timeout=9.58) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_generation_not_match(self, mock_get_boundary): self._do_multipart_success( mock_get_boundary, if_generation_not_match=4, if_metageneration_not_match=4 ) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_client(self, mock_get_boundary): transport = self._mock_transport(http.client.OK, {}) client = mock.Mock(_http=transport, _connection=_Connection, spec=["_http"]) client._connection.API_BASE_URL = "https://storage.googleapis.com" self._do_multipart_success(mock_get_boundary, client=client) - 
@mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_metadata(self, mock_get_boundary): self._do_multipart_success(mock_get_boundary, metadata={"test": "test"}) def test__do_multipart_upload_bad_size(self): - blob = self._make_one(u"blob-name", bucket=None) + blob = self._make_one("blob-name", bucket=None) data = b"data here hear hier" stream = io.BytesIO(data) @@ -2515,7 +2530,7 @@ def _initiate_resumable_helper( from google.cloud.storage.blob import _DEFAULT_CHUNKSIZE bucket = _Bucket(name="whammy", user_project=user_project) - blob = self._make_one(u"blob-name", bucket=bucket, kms_key_name=kms_key_name) + blob = self._make_one("blob-name", bucket=bucket, kms_key_name=kms_key_name) if metadata: self.assertIsNone(blob.metadata) blob._properties["metadata"] = metadata @@ -2543,9 +2558,7 @@ def _initiate_resumable_helper( transport = self._mock_transport(http.client.OK, response_headers) # Create some mock arguments and call the method under test. - client = mock.Mock( - _http=transport, _connection=_Connection, spec=[u"_http"] - ) + client = mock.Mock(_http=transport, _connection=_Connection, spec=["_http"]) client._connection.API_BASE_URL = "https://storage.googleapis.com" # Mock get_api_base_url_for_mtls function. @@ -2557,7 +2570,7 @@ def _initiate_resumable_helper( data = b"hello hallo halo hi-low" stream = io.BytesIO(data) - content_type = u"text/plain" + content_type = "text/plain" if timeout is None: expected_timeout = self._get_default_timeout() @@ -2666,7 +2679,7 @@ def _initiate_resumable_helper( self.assertEqual(stream.tell(), 0) if metadata: - object_metadata = {"name": u"blob-name", "metadata": metadata} + object_metadata = {"name": "blob-name", "metadata": metadata} else: # Check the mocks. blob._get_writable_metadata.assert_called_once_with() @@ -2763,7 +2776,7 @@ def test__initiate_resumable_upload_with_client(self): response_headers = {"location": resumable_url} transport = self._mock_transport(http.client.OK, response_headers) - client = mock.Mock(_http=transport, _connection=_Connection, spec=[u"_http"]) + client = mock.Mock(_http=transport, _connection=_Connection, spec=["_http"]) client._connection.API_BASE_URL = "https://storage.googleapis.com" self._initiate_resumable_helper(client=client) @@ -2908,7 +2921,7 @@ def _do_resumable_helper( ): CHUNK_SIZE = 256 * 1024 USER_AGENT = "testing 1.2.3" - content_type = u"text/html" + content_type = "text/html" # Data to be uploaded. data = b"" + (b"A" * CHUNK_SIZE) + b"" total_bytes = len(data) @@ -2939,7 +2952,7 @@ def _do_resumable_helper( stream = io.BytesIO(data) bucket = _Bucket(name="yesterday") - blob = self._make_one(u"blob-name", bucket=bucket) + blob = self._make_one("blob-name", bucket=bucket) blob.chunk_size = blob._CHUNK_SIZE_MULTIPLE self.assertIsNotNone(blob.chunk_size) @@ -3059,10 +3072,10 @@ def _do_upload_helper( ): from google.cloud.storage.blob import _MAX_MULTIPART_SIZE - blob = self._make_one(u"blob-name", bucket=None) + blob = self._make_one("blob-name", bucket=None) # Create a fake response. - response = mock.Mock(spec=[u"json"]) + response = mock.Mock(spec=["json"]) response.json.return_value = mock.sentinel.json # Mock **both** helpers. 
blob._do_multipart_upload = mock.Mock(return_value=response, spec=[]) @@ -3076,7 +3089,7 @@ def _do_upload_helper( client = mock.sentinel.client stream = mock.sentinel.stream - content_type = u"video/mp4" + content_type = "video/mp4" if size is None: size = 12345654321 @@ -3196,7 +3209,7 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs): data = b"data is here" stream = io.BytesIO(data) stream.seek(2) # Not at zero. - content_type = u"font/woff" + content_type = "font/woff" client = mock.sentinel.client predefined_acl = kwargs.get("predefined_acl", None) if_generation_match = kwargs.get("if_generation_match", None) @@ -3250,7 +3263,9 @@ def test_upload_from_file_w_num_retries(self, mock_warn): self._upload_from_file_helper(num_retries=2) mock_warn.assert_called_once_with( - _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2, + _NUM_RETRIES_MESSAGE, + DeprecationWarning, + stacklevel=2, ) @mock.patch("warnings.warn") @@ -3263,7 +3278,9 @@ def test_upload_from_file_with_retry_conflict(self, mock_warn): self._upload_from_file_helper(retry=DEFAULT_RETRY, num_retries=2) mock_warn.assert_called_once_with( - _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2, + _NUM_RETRIES_MESSAGE, + DeprecationWarning, + stacklevel=2, ) def test_upload_from_file_with_rewind(self): @@ -3336,7 +3353,7 @@ def test_upload_from_filename(self): self.assertIsNone(blob.metadata) data = b"soooo much data" - content_type = u"image/svg+xml" + content_type = "image/svg+xml" client = mock.sentinel.client with _NamedTemporaryFile() as temp: with open(temp.name, "wb") as file_obj: @@ -3367,7 +3384,7 @@ def test_upload_from_filename_with_retry(self): self.assertIsNone(blob.metadata) data = b"soooo much data" - content_type = u"image/svg+xml" + content_type = "image/svg+xml" client = mock.sentinel.client with _NamedTemporaryFile() as temp: with open(temp.name, "wb") as file_obj: @@ -3402,7 +3419,7 @@ def test_upload_from_filename_w_num_retries(self, mock_warn): self.assertIsNone(blob.metadata) data = b"soooo much data" - content_type = u"image/svg+xml" + content_type = "image/svg+xml" client = mock.sentinel.client with _NamedTemporaryFile() as temp: with open(temp.name, "wb") as file_obj: @@ -3425,7 +3442,9 @@ def test_upload_from_filename_w_num_retries(self, mock_warn): self.assertEqual(stream.name, temp.name) mock_warn.assert_called_once_with( - _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2, + _NUM_RETRIES_MESSAGE, + DeprecationWarning, + stacklevel=2, ) def test_upload_from_filename_w_custom_timeout(self): @@ -3439,7 +3458,7 @@ def test_upload_from_filename_w_custom_timeout(self): self.assertIsNone(blob.metadata) data = b"soooo much data" - content_type = u"image/svg+xml" + content_type = "image/svg+xml" client = mock.sentinel.client with _NamedTemporaryFile() as temp: with open(temp.name, "wb") as file_obj: @@ -3499,22 +3518,24 @@ def test_upload_from_string_w_bytes(self): self._upload_from_string_helper(data) def test_upload_from_string_w_text(self): - data = u"\N{snowman} \N{sailboat}" + data = "\N{snowman} \N{sailboat}" self._upload_from_string_helper(data) def test_upload_from_string_w_text_w_retry(self): - data = u"\N{snowman} \N{sailboat}" + data = "\N{snowman} \N{sailboat}" self._upload_from_string_helper(data, retry=DEFAULT_RETRY) @mock.patch("warnings.warn") def test_upload_from_string_with_num_retries(self, mock_warn): from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE - data = u"\N{snowman} \N{sailboat}" + data = "\N{snowman} \N{sailboat}" 
self._upload_from_string_helper(data, num_retries=2) mock_warn.assert_called_once_with( - _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2, + _NUM_RETRIES_MESSAGE, + DeprecationWarning, + stacklevel=2, ) def _create_resumable_upload_session_helper( @@ -3541,9 +3562,9 @@ def _create_resumable_upload_session_helper( transport.request.side_effect = side_effect # Create some mock arguments and call the method under test. - content_type = u"text/plain" + content_type = "text/plain" size = 10000 - client = mock.Mock(_http=transport, _connection=_Connection, spec=[u"_http"]) + client = mock.Mock(_http=transport, _connection=_Connection, spec=["_http"]) client._connection.API_BASE_URL = "https://storage.googleapis.com" client._connection.user_agent = "testing 1.2.3" @@ -3858,7 +3879,10 @@ def test_set_iam_policy_w_user_project_w_explicit_client_w_timeout_retry(self): retry = mock.Mock(spec=[]) returned = blob.set_iam_policy( - policy, client=client, timeout=timeout, retry=retry, + policy, + client=client, + timeout=timeout, + retry=retry, ) self.assertEqual(returned.etag, etag) @@ -4348,7 +4372,8 @@ def test_compose_w_generation_match(self): destination = self._make_one(destination_name, bucket=bucket) destination.compose( - sources=[source_1, source_2], if_generation_match=generation_number, + sources=[source_1, source_2], + if_generation_match=generation_number, ) expected_path = "/b/name/o/%s/compose" % destination_name @@ -4387,7 +4412,8 @@ def test_compose_w_if_generation_match_list_w_warning(self, mock_warn): destination = self._make_one(destination_name, bucket=bucket) destination.compose( - sources=[source_1, source_2], if_generation_match=generation_numbers, + sources=[source_1, source_2], + if_generation_match=generation_numbers, ) expected_path = "/b/name/o/%s/compose" % destination_name @@ -4421,7 +4447,9 @@ def test_compose_w_if_generation_match_list_w_warning(self, mock_warn): ) mock_warn.assert_called_with( - _COMPOSE_IF_GENERATION_LIST_DEPRECATED, DeprecationWarning, stacklevel=2, + _COMPOSE_IF_GENERATION_LIST_DEPRECATED, + DeprecationWarning, + stacklevel=2, ) @mock.patch("warnings.warn") @@ -4449,7 +4477,9 @@ def test_compose_w_if_generation_match_and_if_s_generation_match(self, mock_warn client._post_resource.assert_not_called() mock_warn.assert_called_with( - _COMPOSE_IF_GENERATION_LIST_DEPRECATED, DeprecationWarning, stacklevel=2, + _COMPOSE_IF_GENERATION_LIST_DEPRECATED, + DeprecationWarning, + stacklevel=2, ) @mock.patch("warnings.warn") @@ -4468,7 +4498,8 @@ def test_compose_w_if_metageneration_match_list_w_warning(self, mock_warn): destination = self._make_one(destination_name, bucket=bucket) destination.compose( - sources=[source_1, source_2], if_metageneration_match=metageneration_number, + sources=[source_1, source_2], + if_metageneration_match=metageneration_number, ) expected_path = "/b/name/o/%s/compose" % destination_name @@ -4509,7 +4540,8 @@ def test_compose_w_metageneration_match(self): destination = self._make_one(destination_name, bucket=bucket) destination.compose( - sources=[source_1, source_2], if_metageneration_match=metageneration_number, + sources=[source_1, source_2], + if_metageneration_match=metageneration_number, ) expected_path = "/b/name/o/%s/compose" % destination_name @@ -4883,13 +4915,13 @@ def test_update_storage_class_invalid(self): blob.rewrite = mock.Mock(spec=[]) with self.assertRaises(ValueError): - blob.update_storage_class(u"BOGUS") + blob.update_storage_class("BOGUS") blob.rewrite.assert_not_called() def 
_update_storage_class_multi_pass_helper(self, **kw): blob_name = "blob-name" - storage_class = u"NEARLINE" + storage_class = "NEARLINE" rewrite_token = "TOKEN" bytes_rewritten = 42 object_size = 84 @@ -5003,7 +5035,7 @@ def test_update_storage_class_multi_pass_w_retry(self): def _update_storage_class_single_pass_helper(self, **kw): blob_name = "blob-name" - storage_class = u"NEARLINE" + storage_class = "NEARLINE" object_size = 84 client = mock.Mock(spec=[]) bucket = _Bucket(client=client) @@ -5647,7 +5679,7 @@ def test_bytes(self): self.assertEqual(quoted, "%DE%AD%BE%EF") def test_unicode(self): - helicopter = u"\U0001f681" + helicopter = "\U0001f681" quoted = self._call_fut(helicopter) self.assertEqual(quoted, "%F0%9F%9A%81") @@ -5679,21 +5711,21 @@ def _call_fut(*args, **kwargs): return _maybe_rewind(*args, **kwargs) def test_default(self): - stream = mock.Mock(spec=[u"seek"]) + stream = mock.Mock(spec=["seek"]) ret_val = self._call_fut(stream) self.assertIsNone(ret_val) stream.seek.assert_not_called() def test_do_not_rewind(self): - stream = mock.Mock(spec=[u"seek"]) + stream = mock.Mock(spec=["seek"]) ret_val = self._call_fut(stream, rewind=False) self.assertIsNone(ret_val) stream.seek.assert_not_called() def test_do_rewind(self): - stream = mock.Mock(spec=[u"seek"]) + stream = mock.Mock(spec=["seek"]) ret_val = self._call_fut(stream, rewind=True) self.assertIsNone(ret_val) diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index 122233b6e..c5f1df5d2 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -392,7 +392,8 @@ def test_ctor_explicit_pap(self): bucket = self._make_bucket() config = self._make_one( - bucket, public_access_prevention=PUBLIC_ACCESS_PREVENTION_ENFORCED, + bucket, + public_access_prevention=PUBLIC_ACCESS_PREVENTION_ENFORCED, ) self.assertIs(config.bucket, bucket) @@ -1200,7 +1201,9 @@ def test_list_notifications_w_explicit(self): retry = mock.Mock(spec=[]) iterator = bucket.list_notifications( - client=other_client, timeout=timeout, retry=retry, + client=other_client, + timeout=timeout, + retry=retry, ) self.assertIs(iterator, other_client._list_resource.return_value) @@ -1209,7 +1212,10 @@ def test_list_notifications_w_explicit(self): expected_path = "/b/{}/notificationConfigs".format(bucket_name) expected_item_to_value = _item_to_notification other_client._list_resource.assert_called_once_with( - expected_path, expected_item_to_value, timeout=timeout, retry=retry, + expected_path, + expected_item_to_value, + timeout=timeout, + retry=retry, ) def test_get_notification_miss_w_defaults(self): @@ -1262,7 +1268,9 @@ def test_get_notification_hit_w_explicit_w_user_project(self): bucket = self._make_one(client=client, name=name, user_project=user_project) notification = bucket.get_notification( - notification_id=notification_id, timeout=timeout, retry=retry, + notification_id=notification_id, + timeout=timeout, + retry=retry, ) self.assertIsInstance(notification, BucketNotification) @@ -1311,7 +1319,8 @@ def test_delete_hit_w_metageneration_match_w_explicit_client(self): bucket = self._make_one(client=None, name=name) result = bucket.delete( - client=client, if_metageneration_match=metageneration_number, + client=client, + if_metageneration_match=metageneration_number, ) self.assertIsNone(result) @@ -1348,7 +1357,11 @@ def test_delete_hit_w_force_w_user_project_w_explicit_timeout_retry(self): ) bucket.delete_blobs.assert_called_once_with( - [], on_error=mock.ANY, client=client, timeout=timeout, retry=retry, + [], + on_error=mock.ANY, 
+            client=client,
+            timeout=timeout,
+            retry=retry,
         )
 
         expected_query_params = {"userProject": user_project}
@@ -1595,7 +1608,8 @@ def test_delete_blobs_w_generation_match_wrong_len(self):
 
         with self.assertRaises(ValueError):
             bucket.delete_blobs(
-                [blob_name, blob_name2], if_generation_not_match=[generation_number],
+                [blob_name, blob_name2],
+                if_generation_not_match=[generation_number],
             )
 
         bucket.delete_blob.assert_not_called()
@@ -1895,7 +1909,10 @@ def test_copy_blob_w_source_generation_w_timeout(self):
         timeout = 42
 
         new_blob = source.copy_blob(
-            blob, dest, source_generation=generation, timeout=timeout,
+            blob,
+            dest,
+            source_generation=generation,
+            timeout=timeout,
         )
 
         self.assertIs(new_blob.bucket, dest)
diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py
index d97d25390..2292c6acd 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -532,7 +532,10 @@ def test__list_resource_w_defaults(self):
         client = self._make_one(project=project, credentials=credentials)
         connection = client._base_connection = _make_connection()
 
-        iterator = client._list_resource(path=path, item_to_value=item_to_value,)
+        iterator = client._list_resource(
+            path=path,
+            item_to_value=item_to_value,
+        )
 
         self.assertIsInstance(iterator, HTTPIterator)
         self.assertIs(iterator.client, client)
@@ -1356,7 +1359,9 @@ def test_create_bucket_w_predefined_acl_valid_w_timeout(self):
         timeout = 42
 
         bucket = client.create_bucket(
-            bucket_name, predefined_acl="publicRead", timeout=timeout,
+            bucket_name,
+            predefined_acl="publicRead",
+            timeout=timeout,
         )
 
         expected_path = "/b"
@@ -1398,7 +1403,9 @@ def test_create_bucket_w_predefined_default_object_acl_valid_w_retry(self):
         retry = mock.Mock(spec=[])
 
         bucket = client.create_bucket(
-            bucket_name, predefined_default_object_acl="publicRead", retry=retry,
+            bucket_name,
+            predefined_default_object_acl="publicRead",
+            retry=retry,
         )
 
         expected_path = "/b"
@@ -1635,7 +1642,10 @@ def test_download_blob_to_file_w_no_retry(self):
 
     def test_download_blob_to_file_w_conditional_etag_match_string(self):
         self._download_blob_to_file_helper(
-            use_chunks=True, raw_download=True, retry=None, if_etag_match="kittens",
+            use_chunks=True,
+            raw_download=True,
+            retry=None,
+            if_etag_match="kittens",
        )
 
     def test_download_blob_to_file_w_conditional_etag_match_list(self):
@@ -1648,7 +1658,10 @@ def test_download_blob_to_file_w_conditional_etag_not_match_string(self):
         self._download_blob_to_file_helper(
-            use_chunks=True, raw_download=True, retry=None, if_etag_not_match="kittens",
+            use_chunks=True,
+            raw_download=True,
+            retry=None,
+            if_etag_not_match="kittens",
         )
 
     def test_download_blob_to_file_w_conditional_etag_not_match_list(self):
@@ -2057,7 +2070,11 @@ def test_list_buckets_w_explicit(self):
         )
 
     def _create_hmac_key_helper(
-        self, explicit_project=None, user_project=None, timeout=None, retry=None,
+        self,
+        explicit_project=None,
+        user_project=None,
+        timeout=None,
+        retry=None,
    ):
         import datetime
         from google.cloud._helpers import UTC
diff --git a/tests/unit/test_fileio.py b/tests/unit/test_fileio.py
index a6d4e7544..c0b2d1d70 100644
--- a/tests/unit/test_fileio.py
+++ b/tests/unit/test_fileio.py
@@ -25,7 +25,7 @@
 TEST_TEXT_DATA = string.ascii_lowercase + "\n" + string.ascii_uppercase + "\n"
 TEST_BINARY_DATA = TEST_TEXT_DATA.encode("utf-8")
-TEST_MULTIBYTE_TEXT_DATA = u"あいうえおかきくけこさしすせそたちつてと"
+TEST_MULTIBYTE_TEXT_DATA = 
"ใ‚ใ„ใ†ใˆใŠใ‹ใใใ‘ใ“ใ•ใ—ใ™ใ›ใใŸใกใคใฆใจ" PLAIN_CONTENT_TYPE = "text/plain" NUM_RETRIES = 2 @@ -397,7 +397,9 @@ def test_write(self, mock_warn): self.assertEqual(upload.transmit_next_chunk.call_count, 5) mock_warn.assert_called_once_with( - _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2, + _NUM_RETRIES_MESSAGE, + DeprecationWarning, + stacklevel=2, ) def test_flush_fails(self): @@ -428,7 +430,9 @@ def test_conditional_retry_failure(self): # gives us more control over close() for test purposes. chunk_size = 8 # Note: Real upload requires a multiple of 256KiB. writer = self._make_blob_writer( - blob, chunk_size=chunk_size, content_type=PLAIN_CONTENT_TYPE, + blob, + chunk_size=chunk_size, + content_type=PLAIN_CONTENT_TYPE, ) # The transmit_next_chunk method must actually consume bytes from the @@ -609,7 +613,9 @@ def test_num_retries_and_retry_conflict(self, mock_warn): ) mock_warn.assert_called_once_with( - _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2, + _NUM_RETRIES_MESSAGE, + DeprecationWarning, + stacklevel=2, ) @mock.patch("warnings.warn") @@ -926,8 +932,8 @@ def test_write(self, mock_warn): # The transmit_next_chunk method must actually consume bytes from the # sliding buffer for the flush() feature to work properly. - upload.transmit_next_chunk.side_effect = lambda _: unwrapped_writer._buffer.read( - chunk_size + upload.transmit_next_chunk.side_effect = ( + lambda _: unwrapped_writer._buffer.read(chunk_size) ) # Write under chunk_size. This should be buffered and the upload not @@ -951,5 +957,7 @@ def test_write(self, mock_warn): upload.transmit_next_chunk.assert_called_with(transport) mock_warn.assert_called_once_with( - _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2, + _NUM_RETRIES_MESSAGE, + DeprecationWarning, + stacklevel=2, )