From 227dc280b16be443c18ea7ba8fd0e41a9ae17b48 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim Date: Thu, 29 Aug 2019 10:48:21 -0700 Subject: [PATCH] Reblacken. --- .coveragerc | 10 + .flake8 | 8 + MANIFEST.in | 5 +- google/__init__.py | 2 + google/resumable_media/__init__.py | 12 +- google/resumable_media/_download.py | 110 +++--- google/resumable_media/_helpers.py | 21 +- google/resumable_media/_upload.py | 204 +++++----- google/resumable_media/common.py | 9 +- google/resumable_media/requests/__init__.py | 10 +- google/resumable_media/requests/_helpers.py | 27 +- google/resumable_media/requests/download.py | 31 +- google/resumable_media/requests/upload.py | 70 +++- noxfile.py | 215 +++++------ setup.cfg | 4 +- setup.py | 58 ++- synth.metadata | 2 +- synth.py | 4 +- tests/system/requests/conftest.py | 23 +- tests/system/requests/test_download.py | 170 ++++----- tests/system/requests/test_upload.py | 312 ++++++++-------- tests/system/utils.py | 40 +- tests/unit/requests/test__helpers.py | 60 +-- tests/unit/requests/test_download.py | 214 ++++++----- tests/unit/requests/test_upload.py | 164 ++++---- tests/unit/test__download.py | 266 +++++++------ tests/unit/test__helpers.py | 63 ++-- tests/unit/test__upload.py | 395 ++++++++++---------- tests/unit/test_common.py | 16 +- 29 files changed, 1343 insertions(+), 1182 deletions(-) diff --git a/.coveragerc b/.coveragerc index 8ded7e0f..b178b094 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,3 +1,4 @@ +# Generated by synthtool. DO NOT EDIT! [run] branch = True @@ -7,3 +8,12 @@ show_missing = True exclude_lines = # Re-enable the standard pragma pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore abstract methods + raise NotImplementedError +omit = + */gapic/*.py + */proto/*.py + */core/*.py + */site-packages/*.py \ No newline at end of file diff --git a/.flake8 b/.flake8 index 25168dc8..0268ecc9 100644 --- a/.flake8 +++ b/.flake8 @@ -1,5 +1,13 @@ +# Generated by synthtool. DO NOT EDIT! 
[flake8] +ignore = E203, E266, E501, W503 exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + *_pb2.py + + # Standard linting exemptions. __pycache__, .git, *.pyc, diff --git a/MANIFEST.in b/MANIFEST.in index 01511ce8..9cbf175a 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,5 @@ include README.rst LICENSE -recursive-include google +recursive-include google *.json *.proto recursive-include tests * -global-exclude *.pyc __pycache__ +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/google/__init__.py b/google/__init__.py index 5286f31b..7c2cea5a 100644 --- a/google/__init__.py +++ b/google/__init__.py @@ -14,7 +14,9 @@ try: import pkg_resources + pkg_resources.declare_namespace(__name__) except ImportError: import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/google/resumable_media/__init__.py b/google/resumable_media/__init__.py index fbdae173..8c3da244 100644 --- a/google/resumable_media/__init__.py +++ b/google/resumable_media/__init__.py @@ -52,10 +52,10 @@ __all__ = [ - u'DataCorruption', - u'InvalidResponse', - u'PERMANENT_REDIRECT', - u'RetryStrategy', - u'TOO_MANY_REQUESTS', - u'UPLOAD_CHUNK_SIZE', + u"DataCorruption", + u"InvalidResponse", + u"PERMANENT_REDIRECT", + u"RetryStrategy", + u"TOO_MANY_REQUESTS", + u"UPLOAD_CHUNK_SIZE", ] diff --git a/google/resumable_media/_download.py b/google/resumable_media/_download.py index 71965992..5d2d10d1 100644 --- a/google/resumable_media/_download.py +++ b/google/resumable_media/_download.py @@ -24,11 +24,12 @@ _CONTENT_RANGE_RE = re.compile( - r'bytes (?P\d+)-(?P\d+)/(?P\d+)', - flags=re.IGNORECASE) + r"bytes (?P\d+)-(?P\d+)/(?P\d+)", + flags=re.IGNORECASE, +) _ACCEPTABLE_STATUS_CODES = (http_client.OK, http_client.PARTIAL_CONTENT) -_GET = u'GET' -_ZERO_CONTENT_RANGE_HEADER = u'bytes */0' +_GET = u"GET" +_ZERO_CONTENT_RANGE_HEADER = u"bytes */0" class DownloadBase(object): @@ -51,8 +52,7 @@ class DownloadBase(object): end (Optional[int]): The last byte 
in a range to be downloaded. """ - def __init__(self, media_url, stream=None, - start=None, end=None, headers=None): + def __init__(self, media_url, stream=None, start=None, end=None, headers=None): self.media_url = media_url self._stream = stream self.start = start @@ -78,7 +78,7 @@ def _get_status_code(response): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u'This implementation is virtual.') + raise NotImplementedError(u"This implementation is virtual.") @staticmethod def _get_headers(response): @@ -90,7 +90,7 @@ def _get_headers(response): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u'This implementation is virtual.') + raise NotImplementedError(u"This implementation is virtual.") @staticmethod def _get_body(response): @@ -102,7 +102,7 @@ def _get_body(response): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u'This implementation is virtual.') + raise NotImplementedError(u"This implementation is virtual.") class Download(DownloadBase): @@ -148,7 +148,7 @@ def _prepare_request(self): .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.finished: - raise ValueError(u'A download can only be used once.') + raise ValueError(u"A download can only be used once.") add_bytes_range(self.start, self.end, self._headers) return _GET, self.media_url, None, self._headers @@ -168,7 +168,8 @@ def _process_response(self, response): # Tombstone the current Download so it cannot be used again. self._finished = True _helpers.require_status_code( - response, _ACCEPTABLE_STATUS_CODES, self._get_status_code) + response, _ACCEPTABLE_STATUS_CODES, self._get_status_code + ) def consume(self, transport): """Consume the resource to be downloaded. @@ -183,7 +184,7 @@ def consume(self, transport): Raises: NotImplementedError: Always, since virtual. 
""" - raise NotImplementedError(u'This implementation is virtual.') + raise NotImplementedError(u"This implementation is virtual.") class ChunkedDownload(DownloadBase): @@ -214,14 +215,14 @@ class ChunkedDownload(DownloadBase): ValueError: If ``start`` is negative. """ - def __init__(self, media_url, chunk_size, stream, - start=0, end=None, headers=None): + def __init__(self, media_url, chunk_size, stream, start=0, end=None, headers=None): if start < 0: raise ValueError( - u'On a chunked download the starting ' - u'value cannot be negative.') + u"On a chunked download the starting " u"value cannot be negative." + ) super(ChunkedDownload, self).__init__( - media_url, stream=stream, start=start, end=end, headers=headers) + media_url, stream=stream, start=start, end=end, headers=headers + ) self.chunk_size = chunk_size self._bytes_downloaded = 0 self._total_bytes = None @@ -290,9 +291,9 @@ def _prepare_request(self): .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.finished: - raise ValueError(u'Download has finished.') + raise ValueError(u"Download has finished.") if self.invalid: - raise ValueError(u'Download is invalid and cannot be re-used.') + raise ValueError(u"Download is invalid and cannot be re-used.") curr_start, curr_end = self._get_byte_range() add_bytes_range(curr_start, curr_end, self._headers) @@ -341,34 +342,44 @@ def _process_response(self, response): .. _sans-I/O: https://sans-io.readthedocs.io/ """ # Verify the response before updating the current instance. 
- if _check_for_zero_content_range(response, self._get_status_code, - self._get_headers): + if _check_for_zero_content_range( + response, self._get_status_code, self._get_headers + ): self._finished = True return _helpers.require_status_code( - response, _ACCEPTABLE_STATUS_CODES, - self._get_status_code, callback=self._make_invalid) + response, + _ACCEPTABLE_STATUS_CODES, + self._get_status_code, + callback=self._make_invalid, + ) headers = self._get_headers(response) response_body = self._get_body(response) start_byte, end_byte, total_bytes = get_range_info( - response, self._get_headers, callback=self._make_invalid) + response, self._get_headers, callback=self._make_invalid + ) - transfer_encoding = headers.get(u'transfer-encoding') + transfer_encoding = headers.get(u"transfer-encoding") if transfer_encoding is None: content_length = _helpers.header_required( - response, u'content-length', self._get_headers, - callback=self._make_invalid) + response, + u"content-length", + self._get_headers, + callback=self._make_invalid, + ) num_bytes = int(content_length) if len(response_body) != num_bytes: self._make_invalid() raise common.InvalidResponse( response, - u'Response is different size than content-length', - u'Expected', num_bytes, - u'Received', len(response_body), + u"Response is different size than content-length", + u"Expected", + num_bytes, + u"Received", + len(response_body), ) else: # 'content-length' header not allowed with chunked encoding. @@ -397,7 +408,7 @@ def consume_next_chunk(self, transport): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u'This implementation is virtual.') + raise NotImplementedError(u"This implementation is virtual.") def add_bytes_range(start, end, headers): @@ -437,18 +448,18 @@ def add_bytes_range(start, end, headers): return else: # NOTE: This assumes ``end`` is non-negative. 
- bytes_range = u'0-{:d}'.format(end) + bytes_range = u"0-{:d}".format(end) else: if end is None: if start < 0: - bytes_range = u'{:d}'.format(start) + bytes_range = u"{:d}".format(start) else: - bytes_range = u'{:d}-'.format(start) + bytes_range = u"{:d}-".format(start) else: # NOTE: This is invalid if ``start < 0``. - bytes_range = u'{:d}-{:d}'.format(start, end) + bytes_range = u"{:d}-{:d}".format(start, end) - headers[_helpers.RANGE_HEADER] = u'bytes=' + bytes_range + headers[_helpers.RANGE_HEADER] = u"bytes=" + bytes_range def get_range_info(response, get_headers, callback=_helpers.do_nothing): @@ -470,19 +481,22 @@ def get_range_info(response, get_headers, callback=_helpers.do_nothing): ``bytes {start}-{end}/{total}``. """ content_range = _helpers.header_required( - response, _helpers.CONTENT_RANGE_HEADER, - get_headers, callback=callback) + response, _helpers.CONTENT_RANGE_HEADER, get_headers, callback=callback + ) match = _CONTENT_RANGE_RE.match(content_range) if match is None: callback() raise common.InvalidResponse( - response, u'Unexpected content-range header', content_range, - u'Expected to be of the form "bytes {start}-{end}/{total}"') + response, + u"Unexpected content-range header", + content_range, + u'Expected to be of the form "bytes {start}-{end}/{total}"', + ) return ( - int(match.group(u'start_byte')), - int(match.group(u'end_byte')), - int(match.group(u'total_bytes')) + int(match.group(u"start_byte")), + int(match.group(u"end_byte")), + int(match.group(u"total_bytes")), ) @@ -501,11 +515,13 @@ def _check_for_zero_content_range(response, get_status_code, get_headers): Returns: bool: True if content range total bytes is zero, false otherwise. """ - if get_status_code(response) == http_client. 
\ - REQUESTED_RANGE_NOT_SATISFIABLE: + if get_status_code(response) == http_client.REQUESTED_RANGE_NOT_SATISFIABLE: content_range = _helpers.header_required( - response, _helpers.CONTENT_RANGE_HEADER, - get_headers, callback=_helpers.do_nothing) + response, + _helpers.CONTENT_RANGE_HEADER, + get_headers, + callback=_helpers.do_nothing, + ) if content_range == _ZERO_CONTENT_RANGE_HEADER: return True return False diff --git a/google/resumable_media/_helpers.py b/google/resumable_media/_helpers.py index a947d3b7..7eee2ff8 100644 --- a/google/resumable_media/_helpers.py +++ b/google/resumable_media/_helpers.py @@ -23,8 +23,8 @@ from google.resumable_media import common -RANGE_HEADER = u'range' -CONTENT_RANGE_HEADER = u'content-range' +RANGE_HEADER = u"range" +CONTENT_RANGE_HEADER = u"content-range" RETRYABLE = ( common.TOO_MANY_REQUESTS, http_client.INTERNAL_SERVER_ERROR, @@ -61,13 +61,13 @@ def header_required(response, name, get_headers, callback=do_nothing): if name not in headers: callback() raise common.InvalidResponse( - response, u'Response headers must contain header', name) + response, u"Response headers must contain header", name + ) return headers[name] -def require_status_code(response, status_codes, get_status_code, - callback=do_nothing): +def require_status_code(response, status_codes, get_status_code, callback=do_nothing): """Require a response has a status code among a list. 
Args: @@ -89,8 +89,12 @@ def require_status_code(response, status_codes, get_status_code, if status_code not in status_codes: callback() raise common.InvalidResponse( - response, u'Request failed with status code', - status_code, u'Expected one of', *status_codes) + response, + u"Request failed with status code", + status_code, + u"Expected one of", + *status_codes + ) return status_code @@ -151,8 +155,7 @@ def wait_and_retry(func, get_status_code, retry_strategy): num_retries = 0 base_wait = 0.5 # When doubled will give 1.0 while retry_strategy.retry_allowed(total_sleep, num_retries): - base_wait, wait_time = calculate_retry_wait( - base_wait, retry_strategy.max_sleep) + base_wait, wait_time = calculate_retry_wait(base_wait, retry_strategy.max_sleep) num_retries += 1 total_sleep += wait_time time.sleep(wait_time) diff --git a/google/resumable_media/_upload.py b/google/resumable_media/_upload.py index f255bc4d..bd4b1971 100644 --- a/google/resumable_media/_upload.py +++ b/google/resumable_media/_upload.py @@ -36,28 +36,28 @@ from google.resumable_media import common -_CONTENT_TYPE_HEADER = u'content-type' -_CONTENT_RANGE_TEMPLATE = u'bytes {:d}-{:d}/{:d}' -_RANGE_UNKNOWN_TEMPLATE = u'bytes {:d}-{:d}/*' -_EMPTY_RANGE_TEMPLATE = u'bytes */{:d}' +_CONTENT_TYPE_HEADER = u"content-type" +_CONTENT_RANGE_TEMPLATE = u"bytes {:d}-{:d}/{:d}" +_RANGE_UNKNOWN_TEMPLATE = u"bytes {:d}-{:d}/*" +_EMPTY_RANGE_TEMPLATE = u"bytes */{:d}" _BOUNDARY_WIDTH = len(str(sys.maxsize - 1)) -_BOUNDARY_FORMAT = u'==============={{:0{:d}d}}=='.format(_BOUNDARY_WIDTH) -_MULTIPART_SEP = b'--' -_CRLF = b'\r\n' -_MULTIPART_BEGIN = ( - b'\r\ncontent-type: application/json; charset=UTF-8\r\n\r\n') +_BOUNDARY_FORMAT = u"==============={{:0{:d}d}}==".format(_BOUNDARY_WIDTH) +_MULTIPART_SEP = b"--" +_CRLF = b"\r\n" +_MULTIPART_BEGIN = b"\r\ncontent-type: application/json; charset=UTF-8\r\n\r\n" _RELATED_HEADER = b'multipart/related; boundary="' -_BYTES_RANGE_RE = re.compile( - r'bytes=0-(?P\d+)', 
flags=re.IGNORECASE) +_BYTES_RANGE_RE = re.compile(r"bytes=0-(?P\d+)", flags=re.IGNORECASE) _STREAM_ERROR_TEMPLATE = ( - u'Bytes stream is in unexpected state. ' - u'The local stream has had {:d} bytes read from it while ' - u'{:d} bytes have already been updated (they should match).') + u"Bytes stream is in unexpected state. " + u"The local stream has had {:d} bytes read from it while " + u"{:d} bytes have already been updated (they should match)." +) _STREAM_READ_PAST_TEMPLATE = ( - u'{:d} bytes have been read from the stream, which exceeds ' - u'the expected total {:d}.') -_POST = u'POST' -_PUT = u'PUT' + u"{:d} bytes have been read from the stream, which exceeds " + u"the expected total {:d}." +) +_POST = u"POST" +_PUT = u"PUT" class UploadBase(object): @@ -106,8 +106,7 @@ def _process_response(self, response): # Tombstone the current upload so it cannot be used again (in either # failure or success). self._finished = True - _helpers.require_status_code( - response, (http_client.OK,), self._get_status_code) + _helpers.require_status_code(response, (http_client.OK,), self._get_status_code) @staticmethod def _get_status_code(response): @@ -119,7 +118,7 @@ def _get_status_code(response): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u'This implementation is virtual.') + raise NotImplementedError(u"This implementation is virtual.") @staticmethod def _get_headers(response): @@ -131,7 +130,7 @@ def _get_headers(response): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u'This implementation is virtual.') + raise NotImplementedError(u"This implementation is virtual.") @staticmethod def _get_body(response): @@ -143,7 +142,7 @@ def _get_body(response): Raises: NotImplementedError: Always, since virtual. 
""" - raise NotImplementedError(u'This implementation is virtual.') + raise NotImplementedError(u"This implementation is virtual.") class SimpleUpload(UploadBase): @@ -192,10 +191,10 @@ def _prepare_request(self, data, content_type): .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.finished: - raise ValueError(u'An upload can only be used once.') + raise ValueError(u"An upload can only be used once.") if not isinstance(data, six.binary_type): - raise TypeError(u'`data` must be bytes, received', type(data)) + raise TypeError(u"`data` must be bytes, received", type(data)) self._headers[_CONTENT_TYPE_HEADER] = content_type return _POST, self.upload_url, data, self._headers @@ -212,7 +211,7 @@ def transmit(self, transport, data, content_type): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u'This implementation is virtual.') + raise NotImplementedError(u"This implementation is virtual.") class MultipartUpload(UploadBase): @@ -264,12 +263,13 @@ def _prepare_request(self, data, metadata, content_type): .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.finished: - raise ValueError(u'An upload can only be used once.') + raise ValueError(u"An upload can only be used once.") if not isinstance(data, six.binary_type): - raise TypeError(u'`data` must be bytes, received', type(data)) + raise TypeError(u"`data` must be bytes, received", type(data)) content, multipart_boundary = construct_multipart_request( - data, metadata, content_type) + data, metadata, content_type + ) multipart_content_type = _RELATED_HEADER + multipart_boundary + b'"' self._headers[_CONTENT_TYPE_HEADER] = multipart_content_type return _POST, self.upload_url, content, self._headers @@ -289,7 +289,7 @@ def transmit(self, transport, data, metadata, content_type): Raises: NotImplementedError: Always, since virtual. 
""" - raise NotImplementedError(u'This implementation is virtual.') + raise NotImplementedError(u"This implementation is virtual.") class ResumableUpload(UploadBase): @@ -319,8 +319,11 @@ class ResumableUpload(UploadBase): def __init__(self, upload_url, chunk_size, headers=None): super(ResumableUpload, self).__init__(upload_url, headers=headers) if chunk_size % resumable_media.UPLOAD_CHUNK_SIZE != 0: - raise ValueError(u'{} KB must divide chunk size'.format( - resumable_media.UPLOAD_CHUNK_SIZE / 1024)) + raise ValueError( + u"{} KB must divide chunk size".format( + resumable_media.UPLOAD_CHUNK_SIZE / 1024 + ) + ) self._chunk_size = chunk_size self._stream = None self._content_type = None @@ -367,8 +370,9 @@ def total_bytes(self): """ return self._total_bytes - def _prepare_initiate_request(self, stream, metadata, content_type, - total_bytes=None, stream_final=True): + def _prepare_initiate_request( + self, stream, metadata, content_type, total_bytes=None, stream_final=True + ): """Prepare the contents of HTTP request to initiate upload. This is everything that must be done before a request that doesn't @@ -406,15 +410,15 @@ def _prepare_initiate_request(self, stream, metadata, content_type, .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.resumable_url is not None: - raise ValueError(u'This upload has already been initiated.') + raise ValueError(u"This upload has already been initiated.") if stream.tell() != 0: - raise ValueError(u'Stream must be at beginning.') + raise ValueError(u"Stream must be at beginning.") self._stream = stream self._content_type = content_type headers = { - _CONTENT_TYPE_HEADER: u'application/json; charset=UTF-8', - u'x-upload-content-type': content_type, + _CONTENT_TYPE_HEADER: u"application/json; charset=UTF-8", + u"x-upload-content-type": content_type, } # Set the total bytes if possible. 
if total_bytes is not None: @@ -423,11 +427,11 @@ def _prepare_initiate_request(self, stream, metadata, content_type, self._total_bytes = get_total_bytes(stream) # Add the total bytes to the headers if set. if self._total_bytes is not None: - content_length = u'{:d}'.format(self._total_bytes) - headers[u'x-upload-content-length'] = content_length + content_length = u"{:d}".format(self._total_bytes) + headers[u"x-upload-content-length"] = content_length headers.update(self._headers) - payload = json.dumps(metadata).encode(u'utf-8') + payload = json.dumps(metadata).encode(u"utf-8") return _POST, self.upload_url, payload, headers def _process_initiate_response(self, response): @@ -453,10 +457,18 @@ def _process_initiate_response(self, response): callback=self._make_invalid, ) self._resumable_url = _helpers.header_required( - response, u'location', self._get_headers) + response, u"location", self._get_headers + ) - def initiate(self, transport, stream, metadata, content_type, - total_bytes=None, stream_final=True): + def initiate( + self, + transport, + stream, + metadata, + content_type, + total_bytes=None, + stream_final=True, + ): """Initiate a resumable upload. By default, this method assumes your ``stream`` is in a "final" @@ -491,7 +503,7 @@ def initiate(self, transport, stream, metadata, content_type, Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u'This implementation is virtual.') + raise NotImplementedError(u"This implementation is virtual.") def _prepare_request(self): """Prepare the contents of HTTP request to upload a chunk. @@ -524,20 +536,22 @@ def _prepare_request(self): .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.finished: - raise ValueError(u'Upload has finished.') + raise ValueError(u"Upload has finished.") if self.invalid: raise ValueError( - u'Upload is in an invalid state. To recover call `recover()`.') + u"Upload is in an invalid state. To recover call `recover()`." 
+ ) if self.resumable_url is None: raise ValueError( - u'This upload has not been initiated. Please call ' - u'initiate() before beginning to transmit chunks.') + u"This upload has not been initiated. Please call " + u"initiate() before beginning to transmit chunks." + ) start_byte, payload, content_range = get_next_chunk( - self._stream, self._chunk_size, self._total_bytes) + self._stream, self._chunk_size, self._total_bytes + ) if start_byte != self.bytes_uploaded: - msg = _STREAM_ERROR_TEMPLATE.format( - start_byte, self.bytes_uploaded) + msg = _STREAM_ERROR_TEMPLATE.format(start_byte, self.bytes_uploaded) raise ValueError(msg) headers = { @@ -577,8 +591,11 @@ def _process_response(self, response, bytes_sent): .. _sans-I/O: https://sans-io.readthedocs.io/ """ status_code = _helpers.require_status_code( - response, (http_client.OK, resumable_media.PERMANENT_REDIRECT), - self._get_status_code, callback=self._make_invalid) + response, + (http_client.OK, resumable_media.PERMANENT_REDIRECT), + self._get_status_code, + callback=self._make_invalid, + ) if status_code == http_client.OK: # NOTE: We use the "local" information of ``bytes_sent`` to update # ``bytes_uploaded``, but do not verify this against other @@ -593,15 +610,21 @@ def _process_response(self, response, bytes_sent): self._finished = True else: bytes_range = _helpers.header_required( - response, _helpers.RANGE_HEADER, - self._get_headers, callback=self._make_invalid) + response, + _helpers.RANGE_HEADER, + self._get_headers, + callback=self._make_invalid, + ) match = _BYTES_RANGE_RE.match(bytes_range) if match is None: self._make_invalid() raise common.InvalidResponse( - response, u'Unexpected "range" header', bytes_range, - u'Expected to be of the form "bytes=0-{end}"') - self._bytes_uploaded = int(match.group(u'end_byte')) + 1 + response, + u'Unexpected "range" header', + bytes_range, + u'Expected to be of the form "bytes=0-{end}"', + ) + self._bytes_uploaded = int(match.group(u"end_byte")) + 1 def 
transmit_next_chunk(self, transport): """Transmit the next chunk of the resource to be uploaded. @@ -618,7 +641,7 @@ def transmit_next_chunk(self, transport): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u'This implementation is virtual.') + raise NotImplementedError(u"This implementation is virtual.") def _prepare_recover_request(self): """Prepare the contents of HTTP request to recover from failure. @@ -646,10 +669,9 @@ def _prepare_recover_request(self): .. _sans-I/O: https://sans-io.readthedocs.io/ """ if not self.invalid: - raise ValueError( - u'Upload is not in invalid state, no need to recover.') + raise ValueError(u"Upload is not in invalid state, no need to recover.") - headers = {_helpers.CONTENT_RANGE_HEADER: u'bytes */*'} + headers = {_helpers.CONTENT_RANGE_HEADER: u"bytes */*"} return _PUT, self.resumable_url, None, headers def _process_recover_response(self, response): @@ -672,17 +694,20 @@ def _process_recover_response(self, response): .. _sans-I/O: https://sans-io.readthedocs.io/ """ _helpers.require_status_code( - response, (resumable_media.PERMANENT_REDIRECT,), - self._get_status_code) + response, (resumable_media.PERMANENT_REDIRECT,), self._get_status_code + ) headers = self._get_headers(response) if _helpers.RANGE_HEADER in headers: bytes_range = headers[_helpers.RANGE_HEADER] match = _BYTES_RANGE_RE.match(bytes_range) if match is None: raise common.InvalidResponse( - response, u'Unexpected "range" header', bytes_range, - u'Expected to be of the form "bytes=0-{end}"') - self._bytes_uploaded = int(match.group(u'end_byte')) + 1 + response, + u'Unexpected "range" header', + bytes_range, + u'Expected to be of the form "bytes=0-{end}"', + ) + self._bytes_uploaded = int(match.group(u"end_byte")) + 1 else: # In this case, the upload has not "begun". self._bytes_uploaded = 0 @@ -707,7 +732,7 @@ def recover(self, transport): Raises: NotImplementedError: Always, since virtual. 
""" - raise NotImplementedError(u'This implementation is virtual.') + raise NotImplementedError(u"This implementation is virtual.") def get_boundary(): @@ -720,7 +745,7 @@ def get_boundary(): boundary = _BOUNDARY_FORMAT.format(random_int) # NOTE: Neither % formatting nor .format() are available for byte strings # in Python 3.4, so we must use unicode strings as templates. - return boundary.encode(u'utf-8') + return boundary.encode(u"utf-8") def construct_multipart_request(data, metadata, content_type): @@ -739,20 +764,27 @@ def construct_multipart_request(data, metadata, content_type): between each part. """ multipart_boundary = get_boundary() - json_bytes = json.dumps(metadata).encode(u'utf-8') - content_type = content_type.encode(u'utf-8') + json_bytes = json.dumps(metadata).encode(u"utf-8") + content_type = content_type.encode(u"utf-8") # Combine the two parts into a multipart payload. # NOTE: We'd prefer a bytes template but are restricted by Python 3.4. boundary_sep = _MULTIPART_SEP + multipart_boundary content = ( - boundary_sep + - _MULTIPART_BEGIN + - json_bytes + _CRLF + - boundary_sep + _CRLF + - b'content-type: ' + content_type + _CRLF + - _CRLF + # Empty line between headers and body. - data + _CRLF + - boundary_sep + _MULTIPART_SEP) + boundary_sep + + _MULTIPART_BEGIN + + json_bytes + + _CRLF + + boundary_sep + + _CRLF + + b"content-type: " + + content_type + + _CRLF + + _CRLF + + data # Empty line between headers and body. + + _CRLF + + boundary_sep + + _MULTIPART_SEP + ) return content, multipart_boundary @@ -821,11 +853,13 @@ def get_next_chunk(stream, chunk_size, total_bytes): # stream to be at the beginning. if num_bytes_read != 0: raise ValueError( - u'Stream specified as empty, but produced non-empty content.') + u"Stream specified as empty, but produced non-empty content." + ) else: if num_bytes_read == 0: raise ValueError( - u'Stream is already exhausted. There is no content remaining.') + u"Stream is already exhausted. 
There is no content remaining." + ) if end_byte >= total_bytes: msg = _STREAM_READ_PAST_TEMPLATE.format(end_byte + 1, total_bytes) @@ -855,10 +889,8 @@ def get_content_range(start_byte, end_byte, total_bytes): str: The content range header. """ if total_bytes is None: - return _RANGE_UNKNOWN_TEMPLATE.format( - start_byte, end_byte) + return _RANGE_UNKNOWN_TEMPLATE.format(start_byte, end_byte) elif end_byte < start_byte: return _EMPTY_RANGE_TEMPLATE.format(total_bytes) else: - return _CONTENT_RANGE_TEMPLATE.format( - start_byte, end_byte, total_bytes) + return _CONTENT_RANGE_TEMPLATE.format(start_byte, end_byte, total_bytes) diff --git a/google/resumable_media/common.py b/google/resumable_media/common.py index 9aef58d5..a30d8eb1 100644 --- a/google/resumable_media/common.py +++ b/google/resumable_media/common.py @@ -19,8 +19,8 @@ _SLEEP_RETRY_ERROR_MSG = ( - u'At most one of `max_cumulative_retry` and `max_retries` ' - u'can be specified.') + u"At most one of `max_cumulative_retry` and `max_retries` " u"can be specified." +) UPLOAD_CHUNK_SIZE = 262144 # 256 * 1024 """int: Chunks in a resumable upload must come in multiples of 256 KB.""" @@ -117,8 +117,9 @@ class RetryStrategy(object): are passed. 
""" - def __init__(self, max_sleep=MAX_SLEEP, max_cumulative_retry=None, - max_retries=None): + def __init__( + self, max_sleep=MAX_SLEEP, max_cumulative_retry=None, max_retries=None + ): if max_cumulative_retry is not None and max_retries is not None: raise ValueError(_SLEEP_RETRY_ERROR_MSG) if max_cumulative_retry is None and max_retries is None: diff --git a/google/resumable_media/requests/__init__.py b/google/resumable_media/requests/__init__.py index 2000e163..e2d14015 100644 --- a/google/resumable_media/requests/__init__.py +++ b/google/resumable_media/requests/__init__.py @@ -666,9 +666,9 @@ def SimpleUpload(*args, **kwargs): __all__ = [ - u'ChunkedDownload', - u'Download', - u'MultipartUpload', - u'ResumableUpload', - u'SimpleUpload', + u"ChunkedDownload", + u"Download", + u"MultipartUpload", + u"ResumableUpload", + u"SimpleUpload", ] diff --git a/google/resumable_media/requests/_helpers.py b/google/resumable_media/requests/_helpers.py index c4c3d72c..80cf4542 100644 --- a/google/resumable_media/requests/_helpers.py +++ b/google/resumable_media/requests/_helpers.py @@ -77,15 +77,22 @@ def _get_body(response): bytes: The body of the ``response``. """ if response._content is False: - response._content = b''.join( - response.raw.stream( - _SINGLE_GET_CHUNK_SIZE, decode_content=False)) + response._content = b"".join( + response.raw.stream(_SINGLE_GET_CHUNK_SIZE, decode_content=False) + ) response._content_consumed = True return response._content -def http_request(transport, method, url, data=None, headers=None, - retry_strategy=_DEFAULT_RETRY_STRATEGY, **transport_kwargs): +def http_request( + transport, + method, + url, + data=None, + headers=None, + retry_strategy=_DEFAULT_RETRY_STRATEGY, + **transport_kwargs +): """Make an HTTP request. Args: @@ -107,11 +114,9 @@ def http_request(transport, method, url, data=None, headers=None, ~requests.Response: The return value of ``transport.request()``. 
""" if "timeout" not in transport_kwargs: - transport_kwargs["timeout"] = ( - _DEFAULT_CONNECT_TIMEOUT, _DEFAULT_READ_TIMEOUT) + transport_kwargs["timeout"] = (_DEFAULT_CONNECT_TIMEOUT, _DEFAULT_READ_TIMEOUT) func = functools.partial( - transport.request, method, url, data=data, headers=headers, - **transport_kwargs) - return _helpers.wait_and_retry( - func, RequestsMixin._get_status_code, retry_strategy) + transport.request, method, url, data=data, headers=headers, **transport_kwargs + ) + return _helpers.wait_and_retry(func, RequestsMixin._get_status_code, retry_strategy) diff --git a/google/resumable_media/requests/download.py b/google/resumable_media/requests/download.py index 590c636e..a41ff623 100644 --- a/google/resumable_media/requests/download.py +++ b/google/resumable_media/requests/download.py @@ -24,7 +24,7 @@ _LOGGER = logging.getLogger(__name__) -_HASH_HEADER = u'x-goog-hash' +_HASH_HEADER = u"x-goog-hash" _MISSING_MD5 = u"""\ No MD5 checksum was returned from the service while downloading {} (which happens for composite objects), so client-side content integrity @@ -81,8 +81,7 @@ def _get_expected_md5(self, response): can be detected from the ``X-Goog-Hash`` header. """ headers = self._get_headers(response) - expected_md5_hash = _parse_md5_header( - headers.get(_HASH_HEADER), response) + expected_md5_hash = _parse_md5_header(headers.get(_HASH_HEADER), response) if expected_md5_hash is None: msg = _MISSING_MD5.format(self.media_url) @@ -115,7 +114,8 @@ def _write_to_stream(self, response): # NOTE: This might "donate" ``md5_hash`` to the decoder and replace # it with a ``_DoNothingHash``. 
body_iter = response.raw.stream( - _helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False) + _helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False + ) for chunk in body_iter: self._stream.write(chunk) md5_hash.update(chunk) @@ -127,10 +127,11 @@ def _write_to_stream(self, response): actual_md5_hash = base64.b64encode(md5_hash.digest()) # NOTE: ``b64encode`` returns ``bytes``, but ``expected_md5_hash`` # came from a header, so it will be ``str``. - actual_md5_hash = actual_md5_hash.decode(u'utf-8') + actual_md5_hash = actual_md5_hash.decode(u"utf-8") if actual_md5_hash != expected_md5_hash: msg = _CHECKSUM_MISMATCH.format( - self.media_url, expected_md5_hash, actual_md5_hash) + self.media_url, expected_md5_hash, actual_md5_hash + ) raise common.DataCorruption(response, msg) def consume(self, transport): @@ -216,8 +217,14 @@ def consume_next_chunk(self, transport): method, url, payload, headers = self._prepare_request() # NOTE: We assume "payload is None" but pass it along anyway. response = _helpers.http_request( - transport, method, url, data=payload, headers=headers, - retry_strategy=self._retry_strategy, stream=True) + transport, + method, + url, + data=payload, + headers=headers, + retry_strategy=self._retry_strategy, + stream=True, + ) self._process_response(response) return response @@ -254,9 +261,9 @@ def _parse_md5_header(header_value, response): return None matches = [] - for checksum in header_value.split(u','): - name, value = checksum.split(u'=', 1) - if name == u'md5': + for checksum in header_value.split(u","): + name, value = checksum.split(u"=", 1) + if name == u"md5": matches.append(value) if len(matches) == 0: @@ -266,7 +273,7 @@ def _parse_md5_header(header_value, response): else: raise common.InvalidResponse( response, - u'X-Goog-Hash header had multiple ``md5`` values.', + u"X-Goog-Hash header had multiple ``md5`` values.", header_value, matches, ) diff --git a/google/resumable_media/requests/upload.py b/google/resumable_media/requests/upload.py 
index eaef2a01..0a893a43 100644 --- a/google/resumable_media/requests/upload.py +++ b/google/resumable_media/requests/upload.py @@ -51,11 +51,15 @@ def transmit(self, transport, data, content_type): Returns: ~requests.Response: The HTTP response returned by ``transport``. """ - method, url, payload, headers = self._prepare_request( - data, content_type) + method, url, payload, headers = self._prepare_request(data, content_type) response = _helpers.http_request( - transport, method, url, data=payload, headers=headers, - retry_strategy=self._retry_strategy) + transport, + method, + url, + data=payload, + headers=headers, + retry_strategy=self._retry_strategy, + ) self._process_response(response) return response @@ -91,10 +95,16 @@ def transmit(self, transport, data, metadata, content_type): ~requests.Response: The HTTP response returned by ``transport``. """ method, url, payload, headers = self._prepare_request( - data, metadata, content_type) + data, metadata, content_type + ) response = _helpers.http_request( - transport, method, url, data=payload, headers=headers, - retry_strategy=self._retry_strategy) + transport, + method, + url, + data=payload, + headers=headers, + retry_strategy=self._retry_strategy, + ) self._process_response(response) return response @@ -282,8 +292,15 @@ class ResumableUpload(_helpers.RequestsMixin, _upload.ResumableUpload): :data:`.UPLOAD_CHUNK_SIZE`. """ - def initiate(self, transport, stream, metadata, content_type, - total_bytes=None, stream_final=True): + def initiate( + self, + transport, + stream, + metadata, + content_type, + total_bytes=None, + stream_final=True, + ): """Initiate a resumable upload. By default, this method assumes your ``stream`` is in a "final" @@ -319,11 +336,20 @@ def initiate(self, transport, stream, metadata, content_type, ~requests.Response: The HTTP response returned by ``transport``. 
""" method, url, payload, headers = self._prepare_initiate_request( - stream, metadata, content_type, - total_bytes=total_bytes, stream_final=stream_final) + stream, + metadata, + content_type, + total_bytes=total_bytes, + stream_final=stream_final, + ) response = _helpers.http_request( - transport, method, url, data=payload, headers=headers, - retry_strategy=self._retry_strategy) + transport, + method, + url, + data=payload, + headers=headers, + retry_strategy=self._retry_strategy, + ) self._process_initiate_response(response) return response @@ -391,8 +417,13 @@ def transmit_next_chunk(self, transport): """ method, url, payload, headers = self._prepare_request() response = _helpers.http_request( - transport, method, url, data=payload, headers=headers, - retry_strategy=self._retry_strategy) + transport, + method, + url, + data=payload, + headers=headers, + retry_strategy=self._retry_strategy, + ) self._process_response(response, len(payload)) return response @@ -416,7 +447,12 @@ def recover(self, transport): method, url, payload, headers = self._prepare_recover_request() # NOTE: We assume "payload is None" but pass it along anyway. response = _helpers.http_request( - transport, method, url, data=payload, headers=headers, - retry_strategy=self._retry_strategy) + transport, + method, + url, + data=payload, + headers=headers, + retry_strategy=self._retry_strategy, + ) self._process_recover_response(response) return response diff --git a/noxfile.py b/noxfile.py index 1c1c075d..f1b90137 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,10 +1,12 @@ -# Copyright 2017 Google Inc. +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,142 +14,147 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Generated by synthtool. DO NOT EDIT! + from __future__ import absolute_import import os +import shutil import nox -SYSTEM_TEST_ENV_VARS = ( - 'GOOGLE_APPLICATION_CREDENTIALS', -) -GOOGLE_AUTH = 'google-auth >= 0.10.0' - - -@nox.session(python=['2.7', '3.4', '3.5', '3.6', '3.7']) -def unit_tests(session): - """Run the unit test suite.""" +LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) +BLACK_VERSION = "black==19.3b0" +BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - # Install all test dependencies, then install this package in-place. - session.install('mock', 'pytest', 'pytest-cov') - session.install('-e', '.[requests]') +if os.path.exists("samples"): + BLACK_PATHS.append("samples") - # Run py.test against the unit tests. - # NOTE: We don't require 100% line coverage for unit test runs since - # some have branches that are Py2/Py3 specific. - line_coverage = '--cov-fail-under=99' - session.run( - 'py.test', - '--cov=google.resumable_media', - '--cov=tests.unit', - '--cov-append', - '--cov-config=.coveragerc', - '--cov-report=', - line_coverage, - os.path.join('tests', 'unit'), - *session.posargs - ) +@nox.session(python="3.7") +def lint(session): + """Run linters. -@nox.session(python='3.6') -def docs(session): - """Build the docs.""" - - # Install Sphinx and other dependencies. - session.chdir(os.path.realpath(os.path.dirname(__file__))) - session.install( - 'Sphinx == 2.1.2', - 'sphinx_rtd_theme == 0.4.3', - 'sphinx-docstring-typing >= 0.0.3', - ) - session.install('-e', '.[requests]') - - # Build the docs! 
- session.run('bash', os.path.join('scripts', 'build_docs.sh')) - - -@nox.session(python='3.6') -def doctest(session): - """Run the doctests.""" - # Install Sphinx and other dependencies. - session.chdir(os.path.realpath(os.path.dirname(__file__))) - session.install( - 'sphinx', - 'sphinx_rtd_theme', - 'sphinx-docstring-typing >= 0.0.3', - 'mock', - GOOGLE_AUTH, - ) - session.install('-e', '.[requests]') + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION, *LOCAL_DEPS) + session.run("black", "--check", *BLACK_PATHS) + session.run("flake8", "google", "tests") - # Run the doctests with Sphinx. - session.run( - 'sphinx-build', '-W', '-b', 'doctest', - '-d', os.path.join('docs_build', 'build', 'doctrees'), - 'docs_build', os.path.join('docs_build', 'doctest'), - ) +@nox.session(python="3.6") +def blacken(session): + """Run black. -@nox.session(python='3.6') -def lint(session): - """Run flake8. + Format code to uniform standard. - Returns a failure if flake8 finds linting errors or sufficiently - serious code quality issues. + This currently uses Python 3.6 due to the automated Kokoro run of synthtool. + That run uses an image that doesn't have 3.6 installed. Before updating this + check the state of the `gcp_ubuntu_config` we use for that Kokoro run. """ - session.install('flake8') - session.install('-e', '.') - session.run( - 'flake8', - os.path.join('google', 'resumable_media'), - 'tests', - ) + session.install(BLACK_VERSION) + session.run("black", *BLACK_PATHS) -@nox.session(python='3.6') +@nox.session(python="3.7") def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" - session.install('docutils', 'Pygments') + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def default(session): + # Install all test dependencies, then install this package in-place. 
+ session.install("mock", "pytest", "pytest-cov") + for local_dep in LOCAL_DEPS: + session.install("-e", local_dep) + session.install("-e", ".") + + # Run py.test against the unit tests. session.run( - 'python', 'setup.py', 'check', '--restructuredtext', '--strict') + "py.test", + "--quiet", + "--cov=google.cloud", + "--cov=tests.unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=["2.7", "3.5", "3.6", "3.7"]) +def unit(session): + """Run the unit test suite.""" + default(session) -@nox.session(python=['2.7', '3.6']) -def system_tests(session): +@nox.session(python=["2.7", "3.7"]) +def system(session): """Run the system test suite.""" + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + # Sanity check: Only run tests if the environment variable is set. + if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): + session.skip("Credentials must be set via environment variable") - # Sanity check: environment variables are set. - missing = [] - for env_var in SYSTEM_TEST_ENV_VARS: - if env_var not in os.environ: - missing.append(env_var) + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") - # Only run system tests if the environment variables are set. - if missing: - all_vars = ', '.join(missing) - msg = 'Environment variable(s) unset: {}'.format(all_vars) - session.skip(msg) + # Use pre-release gRPC for system tests. + session.install("--pre", "grpcio") # Install all test dependencies, then install this package into the - # virutalenv's dist-packages. 
-    session.install('mock', 'pytest', GOOGLE_AUTH)
-    session.install('-e', '.[requests]')
+    # virtualenv's dist-packages.
+    session.install("mock", "pytest")
+    for local_dep in LOCAL_DEPS:
+        session.install("-e", local_dep)
+    session.install("-e", "../test_utils/")
+    session.install("-e", ".")
 
     # Run py.test against the system tests.
-    session.run(
-        'py.test',
-        os.path.join('tests', 'system'),
-        *session.posargs
-    )
+    if system_test_exists:
+        session.run("py.test", "--quiet", system_test_path, *session.posargs)
+    if system_test_folder_exists:
+        session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
 
 
-@nox.session(python='3.6')
+@nox.session(python="3.7")
 def cover(session):
     """Run the final coverage report.
 
     This outputs the coverage report aggregating coverage from the unit
     test runs (not system test runs), and then erases coverage data.
     """
-    session.install('coverage', 'pytest-cov')
-    session.run('coverage', 'report', '--show-missing', '--fail-under=100')
-    session.run('coverage', 'erase')
+    session.install("coverage", "pytest-cov")
+    session.run("coverage", "report", "--show-missing", "--fail-under=100")
+
+    session.run("coverage", "erase")
+
+
+@nox.session(python="3.7")
+def docs(session):
+    """Build the docs for this library."""
+
+    session.install("-e", ".")
+    session.install("sphinx", "alabaster", "recommonmark")
+
+    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+    session.run(
+        "sphinx-build",
+        "-W",  # warnings as errors
+        "-T",  # show full traceback on exception
+        "-N",  # no colors
+        "-b",
+        "html",
+        "-d",
+        os.path.join("docs", "_build", "doctrees", ""),
+        os.path.join("docs", ""),
+        os.path.join("docs", "_build", "html", ""),
+    )
diff --git a/setup.cfg b/setup.cfg
index 79874b74..3bd55550 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,5 +1,3 @@
+# Generated by synthtool. DO NOT EDIT!
[bdist_wheel] universal = 1 - -[tool:pytest] -addopts = --tb=native diff --git a/setup.py b/setup.py index d308f705..ec373c97 100644 --- a/setup.py +++ b/setup.py @@ -19,49 +19,43 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -with open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: +with open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() -REQUIREMENTS = [ - 'six', -] -EXTRAS_REQUIRE = { - 'requests': [ - 'requests >= 2.18.0, < 3.0.0dev', - ], -} +REQUIREMENTS = ["six"] +EXTRAS_REQUIRE = {"requests": ["requests >= 2.18.0, < 3.0.0dev"]} setuptools.setup( - name='google-resumable-media', - version = '0.3.3', - description='Utilities for Google Media Downloads and Resumable Uploads', - author='Google Cloud Platform', - author_email='googleapis-publisher@google.com', + name="google-resumable-media", + version="0.3.3", + description="Utilities for Google Media Downloads and Resumable Uploads", + author="Google Cloud Platform", + author_email="googleapis-publisher@google.com", long_description=README, - namespace_packages=['google'], + namespace_packages=["google"], scripts=[], - url='https://github.com/GoogleCloudPlatform/google-resumable-media-python', - packages=setuptools.find_packages(exclude=('tests*',)), - license='Apache 2.0', - platforms='Posix; MacOS X; Windows', + url="https://github.com/GoogleCloudPlatform/google-resumable-media-python", + packages=setuptools.find_packages(exclude=("tests*",)), + license="Apache 2.0", + platforms="Posix; MacOS X; Windows", include_package_data=True, zip_safe=False, install_requires=REQUIREMENTS, extras_require=EXTRAS_REQUIRE, - python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*', + python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: OS Independent', - 'Programming Language :: 
Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Topic :: Internet', + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Topic :: Internet", ], ) diff --git a/synth.metadata b/synth.metadata index aae8e4f7..193373be 100644 --- a/synth.metadata +++ b/synth.metadata @@ -1,5 +1,5 @@ { - "updateTime": "2019-08-28T20:51:06.722996Z", + "updateTime": "2019-08-29T17:47:45.352258Z", "sources": [ { "template": { diff --git a/synth.py b/synth.py index 489efdc2..119d4311 100644 --- a/synth.py +++ b/synth.py @@ -6,9 +6,7 @@ # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- - -intersphinx = {"requests": "https://2.python-requests.org/en/master/"} -templated_files = common.py_library(intersphinx_dependencies=intersphinx) +templated_files = common.py_library() s.move(templated_files) s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/tests/system/requests/conftest.py b/tests/system/requests/conftest.py index e5dd0475..a1bd38ee 100644 --- a/tests/system/requests/conftest.py +++ b/tests/system/requests/conftest.py @@ -24,35 +24,32 @@ def ensure_bucket(transport): get_response = transport.get(utils.BUCKET_URL) if get_response.status_code == 404: credentials = transport.credentials - query_params = { - 
'project': credentials.project_id, - } - payload = { - 'name': utils.BUCKET_NAME, - } + query_params = {"project": credentials.project_id} + payload = {"name": utils.BUCKET_NAME} post_response = transport.post( - utils.BUCKET_POST_URL, params=query_params, json=payload) + utils.BUCKET_POST_URL, params=query_params, json=payload + ) if not post_response.ok: - raise ValueError("{}: {}".format( - post_response.status_code, post_response.reason)) + raise ValueError( + "{}: {}".format(post_response.status_code, post_response.reason) + ) def cleanup_bucket(transport): del_response = transport.delete(utils.BUCKET_URL) if not del_response.ok: - raise ValueError("{}: {}".format( - del_response.status_code, del_response.reason)) + raise ValueError("{}: {}".format(del_response.status_code, del_response.reason)) -@pytest.fixture(scope=u'session') +@pytest.fixture(scope=u"session") def authorized_transport(): credentials, _ = google.auth.default(scopes=(utils.GCS_RW_SCOPE,)) yield tr_requests.AuthorizedSession(credentials) -@pytest.fixture(scope=u'session') +@pytest.fixture(scope=u"session") def bucket(authorized_transport): ensure_bucket(authorized_transport) diff --git a/tests/system/requests/test_download.py b/tests/system/requests/test_download.py index 76e114bc..9314dcb6 100644 --- a/tests/system/requests/test_download.py +++ b/tests/system/requests/test_download.py @@ -32,53 +32,50 @@ CURR_DIR = os.path.dirname(os.path.realpath(__file__)) -DATA_DIR = os.path.join(CURR_DIR, u'..', u'..', u'data') -PLAIN_TEXT = u'text/plain' -IMAGE_JPEG = u'image/jpeg' +DATA_DIR = os.path.join(CURR_DIR, u"..", u"..", u"data") +PLAIN_TEXT = u"text/plain" +IMAGE_JPEG = u"image/jpeg" ALL_FILES = ( { - u'path': os.path.realpath(os.path.join(DATA_DIR, u'image1.jpg')), - u'content_type': IMAGE_JPEG, - u'checksum': u'1bsd83IYNug8hd+V1ING3Q==', - u'slices': ( + u"path": os.path.realpath(os.path.join(DATA_DIR, u"image1.jpg")), + u"content_type": IMAGE_JPEG, + u"checksum": 
u"1bsd83IYNug8hd+V1ING3Q==", + u"slices": ( slice(1024, 16386, None), # obj[1024:16386] slice(None, 8192, None), # obj[:8192] slice(-256, None, None), # obj[-256:] slice(262144, None, None), # obj[262144:] ), - }, { - u'path': os.path.realpath(os.path.join(DATA_DIR, u'image2.jpg')), - u'content_type': IMAGE_JPEG, - u'checksum': u'gdLXJltiYAMP9WZZFEQI1Q==', - u'slices': ( + }, + { + u"path": os.path.realpath(os.path.join(DATA_DIR, u"image2.jpg")), + u"content_type": IMAGE_JPEG, + u"checksum": u"gdLXJltiYAMP9WZZFEQI1Q==", + u"slices": ( slice(1024, 16386, None), # obj[1024:16386] slice(None, 8192, None), # obj[:8192] slice(-256, None, None), # obj[-256:] slice(262144, None, None), # obj[262144:] ), - }, { - u'path': os.path.realpath(os.path.join(DATA_DIR, u'file.txt')), - u'content_type': PLAIN_TEXT, - u'checksum': u'XHSHAr/SpIeZtZbjgQ4nGw==', - u'slices': (), - }, { - u'path': os.path.realpath(os.path.join(DATA_DIR, u'gzipped.txt.gz')), - u'content_type': PLAIN_TEXT, - u'checksum': u'KHRs/+ZSrc/FuuR4qz/PZQ==', - u'slices': (), - u'metadata': { - u'contentEncoding': u'gzip', - }, + }, + { + u"path": os.path.realpath(os.path.join(DATA_DIR, u"file.txt")), + u"content_type": PLAIN_TEXT, + u"checksum": u"XHSHAr/SpIeZtZbjgQ4nGw==", + u"slices": (), + }, + { + u"path": os.path.realpath(os.path.join(DATA_DIR, u"gzipped.txt.gz")), + u"content_type": PLAIN_TEXT, + u"checksum": u"KHRs/+ZSrc/FuuR4qz/PZQ==", + u"slices": (), + u"metadata": {u"contentEncoding": u"gzip"}, }, ) -ENCRYPTED_ERR = ( - b'The target object is encrypted by a customer-supplied encryption key.') -NO_BODY_ERR = ( - u'The content for this response was already consumed') +ENCRYPTED_ERR = b"The target object is encrypted by a customer-supplied encryption key." 
+NO_BODY_ERR = u"The content for this response was already consumed" NOT_FOUND_ERR = ( - b'No such object: ' + - utils.BUCKET_NAME.encode('utf-8') + - b'/does-not-exist.txt' + b"No such object: " + utils.BUCKET_NAME.encode("utf-8") + b"/does-not-exist.txt" ) @@ -99,20 +96,19 @@ class CorruptingAuthorizedSession(tr_requests.AuthorizedSession): constructor. """ - EMPTY_HASH = base64.b64encode( - hashlib.md5(b'').digest()).decode(u'utf-8') + EMPTY_HASH = base64.b64encode(hashlib.md5(b"").digest()).decode(u"utf-8") def request(self, method, url, data=None, headers=None, **kwargs): """Implementation of Requests' request.""" response = tr_requests.AuthorizedSession.request( - self, method, url, data=data, headers=headers, **kwargs) - response.headers[download_mod._HASH_HEADER] = ( - u'md5={}'.format(self.EMPTY_HASH)) + self, method, url, data=data, headers=headers, **kwargs + ) + response.headers[download_mod._HASH_HEADER] = u"md5={}".format(self.EMPTY_HASH) return response # Transport that returns corrupt data, so we can exercise checksum handling. 
-@pytest.fixture(scope=u'module') +@pytest.fixture(scope=u"module") def corrupting_transport(): credentials, _ = google.auth.default(scopes=(utils.GCS_RW_SCOPE,)) yield CorruptingAuthorizedSession(credentials) @@ -125,22 +121,22 @@ def delete_blob(transport, blob_name): def _get_contents_for_upload(info): - with open(info[u'path'], u'rb') as file_obj: + with open(info[u"path"], u"rb") as file_obj: return file_obj.read() def _get_contents(info): - full_path = info[u'path'] - with open(full_path, u'rb') as file_obj: + full_path = info[u"path"] + with open(full_path, u"rb") as file_obj: return file_obj.read() def _get_blob_name(info): - full_path = info[u'path'] + full_path = info[u"path"] return os.path.basename(full_path) -@pytest.fixture(scope=u'module') +@pytest.fixture(scope=u"module") def add_files(authorized_transport, bucket): blob_names = [] for info in ALL_FILES: @@ -148,19 +144,19 @@ def add_files(authorized_transport, bucket): blob_name = _get_blob_name(info) blob_names.append(blob_name) - if u'metadata' in info: + if u"metadata" in info: upload = resumable_requests.MultipartUpload(utils.MULTIPART_UPLOAD) - metadata = copy.deepcopy(info[u'metadata']) - metadata[u'name'] = blob_name + metadata = copy.deepcopy(info[u"metadata"]) + metadata[u"name"] = blob_name response = upload.transmit( - authorized_transport, to_upload, - metadata, info[u'content_type']) + authorized_transport, to_upload, metadata, info[u"content_type"] + ) else: - upload_url = utils.SIMPLE_UPLOAD_TEMPLATE.format( - blob_name=blob_name) + upload_url = utils.SIMPLE_UPLOAD_TEMPLATE.format(blob_name=blob_name) upload = resumable_requests.SimpleUpload(upload_url) response = upload.transmit( - authorized_transport, to_upload, info[u'content_type']) + authorized_transport, to_upload, info[u"content_type"] + ) assert response.status_code == http_client.OK @@ -176,16 +172,17 @@ def check_tombstoned(download, transport): if isinstance(download, resumable_requests.Download): with 
pytest.raises(ValueError) as exc_info: download.consume(transport) - assert exc_info.match(u'A download can only be used once.') + assert exc_info.match(u"A download can only be used once.") else: with pytest.raises(ValueError) as exc_info: download.consume_next_chunk(transport) - assert exc_info.match(u'Download has finished.') + assert exc_info.match(u"Download has finished.") def read_raw_content(response): - return b''.join(response.raw.stream( - _helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False)) + return b"".join( + response.raw.stream(_helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False) + ) def test_download_full(add_files, authorized_transport): @@ -216,7 +213,7 @@ def test_download_to_stream(add_files, authorized_transport): response = download.consume(authorized_transport) assert response.status_code == http_client.OK with pytest.raises(RuntimeError) as exc_info: - getattr(response, u'content') + getattr(response, u"content") assert exc_info.value.args == (NO_BODY_ERR,) assert response._content is False assert response._content_consumed is True @@ -238,15 +235,17 @@ def test_corrupt_download(add_files, corrupting_transport): assert download.finished msg = download_mod._CHECKSUM_MISMATCH.format( - download.media_url, CorruptingAuthorizedSession.EMPTY_HASH, - info[u'checksum']) + download.media_url, + CorruptingAuthorizedSession.EMPTY_HASH, + info[u"checksum"], + ) assert exc_info.value.args == (msg,) -@pytest.fixture(scope=u'module') +@pytest.fixture(scope=u"module") def secret_file(authorized_transport, bucket): - blob_name = u'super-seekrit.txt' - data = b'Please do not tell anyone my encrypted seekrit.' + blob_name = u"super-seekrit.txt" + data = b"Please do not tell anyone my encrypted seekrit." 
upload_url = utils.SIMPLE_UPLOAD_TEMPLATE.format(blob_name=blob_name) headers = utils.get_encryption_headers() @@ -290,7 +289,7 @@ def test_extra_headers(authorized_transport, secret_file): def test_non_existent_file(authorized_transport, bucket): - blob_name = u'does-not-exist.txt' + blob_name = u"does-not-exist.txt" media_url = utils.DOWNLOAD_URL_TEMPLATE.format(blob_name=blob_name) download = resumable_requests.Download(media_url) @@ -301,12 +300,12 @@ def test_non_existent_file(authorized_transport, bucket): check_tombstoned(download, authorized_transport) -@pytest.fixture(scope=u'module') +@pytest.fixture(scope=u"module") def simple_file(authorized_transport, bucket): - blob_name = u'basic-file.txt' + blob_name = u"basic-file.txt" upload_url = utils.SIMPLE_UPLOAD_TEMPLATE.format(blob_name=blob_name) upload = resumable_requests.SimpleUpload(upload_url) - data = b'Simple contents' + data = b"Simple contents" response = upload.transmit(authorized_transport, data, PLAIN_TEXT) assert response.status_code == http_client.OK @@ -330,8 +329,10 @@ def test_bad_range(simple_file, authorized_transport): download.consume(authorized_transport) check_error_response( - exc_info, http_client.REQUESTED_RANGE_NOT_SATISFIABLE, - b'Request range not satisfiable') + exc_info, + http_client.REQUESTED_RANGE_NOT_SATISFIABLE, + b"Request range not satisfiable", + ) check_tombstoned(download, authorized_transport) @@ -342,8 +343,7 @@ def _download_slice(media_url, slice_): if slice_.stop is not None: end = slice_.stop - 1 - return resumable_requests.Download( - media_url, start=slice_.start, end=end) + return resumable_requests.Download(media_url, start=slice_.start, end=end) def test_download_partial(add_files, authorized_transport): @@ -352,7 +352,7 @@ def test_download_partial(add_files, authorized_transport): blob_name = _get_blob_name(info) media_url = utils.DOWNLOAD_URL_TEMPLATE.format(blob_name=blob_name) - for slice_ in info[u'slices']: + for slice_ in info[u"slices"]: download 
= _download_slice(media_url, slice_) response = download.consume(authorized_transport) assert response.status_code == http_client.PARTIAL_CONTENT @@ -375,8 +375,7 @@ def get_chunk_size(min_chunks, total_bytes): return num_chunks, chunk_size -def consume_chunks(download, authorized_transport, - total_bytes, actual_contents): +def consume_chunks(download, authorized_transport, total_bytes, actual_contents): start_byte = download.start end_byte = download.end if end_byte is None: @@ -407,12 +406,11 @@ def test_chunked_download_full(add_files, authorized_transport): # Create the actual download object. media_url = utils.DOWNLOAD_URL_TEMPLATE.format(blob_name=blob_name) stream = io.BytesIO() - download = resumable_requests.ChunkedDownload( - media_url, chunk_size, stream) + download = resumable_requests.ChunkedDownload(media_url, chunk_size, stream) # Consume the resource in chunks. num_responses, last_response = consume_chunks( - download, authorized_transport, - total_bytes, actual_contents) + download, authorized_transport, total_bytes, actual_contents + ) # Make sure the combined chunks are the whole object. assert stream.getvalue() == actual_contents # Check that we have the right number of responses. @@ -429,7 +427,7 @@ def test_chunked_download_partial(add_files, authorized_transport): blob_name = _get_blob_name(info) media_url = utils.DOWNLOAD_URL_TEMPLATE.format(blob_name=blob_name) - for slice_ in info[u'slices']: + for slice_ in info[u"slices"]: # Manually replace a missing start with 0. start = 0 if slice_.start is None else slice_.start # Chunked downloads don't support a negative index. @@ -448,15 +446,16 @@ def test_chunked_download_partial(add_files, authorized_transport): end_byte = slice_.stop - 1 end = end_byte - num_chunks, chunk_size = get_chunk_size( - 7, end_byte - start + 1) + num_chunks, chunk_size = get_chunk_size(7, end_byte - start + 1) # Create the actual download object. 
stream = io.BytesIO() download = resumable_requests.ChunkedDownload( - media_url, chunk_size, stream, start=start, end=end) + media_url, chunk_size, stream, start=start, end=end + ) # Consume the resource in chunks. num_responses, last_response = consume_chunks( - download, authorized_transport, total_bytes, actual_contents) + download, authorized_transport, total_bytes, actual_contents + ) # Make sure the combined chunks are the whole slice. assert stream.getvalue() == actual_contents[slice_] @@ -476,10 +475,12 @@ def test_chunked_with_extra_headers(authorized_transport, secret_file): media_url = utils.DOWNLOAD_URL_TEMPLATE.format(blob_name=blob_name) stream = io.BytesIO() download = resumable_requests.ChunkedDownload( - media_url, chunk_size, stream, headers=headers) + media_url, chunk_size, stream, headers=headers + ) # Consume the resource in chunks. num_responses, last_response = consume_chunks( - download, authorized_transport, len(data), data) + download, authorized_transport, len(data), data + ) # Make sure the combined chunks are the whole object. assert stream.getvalue() == data # Check that we have the right number of responses. @@ -489,8 +490,7 @@ def test_chunked_with_extra_headers(authorized_transport, secret_file): check_tombstoned(download, authorized_transport) # Attempt to consume the resource **without** the headers. 
stream_wo = io.BytesIO() - download_wo = resumable_requests.ChunkedDownload( - media_url, chunk_size, stream_wo) + download_wo = resumable_requests.ChunkedDownload(media_url, chunk_size, stream_wo) with pytest.raises(resumable_media.InvalidResponse) as exc_info: download_wo.consume_next_chunk(authorized_transport) diff --git a/tests/system/requests/test_upload.py b/tests/system/requests/test_upload.py index 61648686..e39490c2 100644 --- a/tests/system/requests/test_upload.py +++ b/tests/system/requests/test_upload.py @@ -27,17 +27,18 @@ CURR_DIR = os.path.dirname(os.path.realpath(__file__)) -DATA_DIR = os.path.join(CURR_DIR, u'..', u'..', u'data') -ICO_FILE = os.path.realpath(os.path.join(DATA_DIR, u'favicon.ico')) -IMAGE_FILE = os.path.realpath(os.path.join(DATA_DIR, u'image1.jpg')) -ICO_CONTENT_TYPE = u'image/x-icon' -JPEG_CONTENT_TYPE = u'image/jpeg' -BYTES_CONTENT_TYPE = u'application/octet-stream' +DATA_DIR = os.path.join(CURR_DIR, u"..", u"..", u"data") +ICO_FILE = os.path.realpath(os.path.join(DATA_DIR, u"favicon.ico")) +IMAGE_FILE = os.path.realpath(os.path.join(DATA_DIR, u"image1.jpg")) +ICO_CONTENT_TYPE = u"image/x-icon" +JPEG_CONTENT_TYPE = u"image/jpeg" +BYTES_CONTENT_TYPE = u"application/octet-stream" BAD_CHUNK_SIZE_MSG = ( - b'Invalid request. The number of bytes uploaded is required to be equal ' - b'or greater than 262144, except for the final request (it\'s recommended ' - b'to be the exact multiple of 262144). The received request contained ' - b'1024 bytes, which does not meet this requirement.') + b"Invalid request. The number of bytes uploaded is required to be equal " + b"or greater than 262144, except for the final request (it's recommended " + b"to be the exact multiple of 262144). The received request contained " + b"1024 bytes, which does not meet this requirement." 
+) @pytest.fixture @@ -60,25 +61,30 @@ def get_md5(data): return base64.b64encode(hash_obj.digest()) -def check_response(response, blob_name, actual_contents=None, - total_bytes=None, metadata=None, - content_type=ICO_CONTENT_TYPE): +def check_response( + response, + blob_name, + actual_contents=None, + total_bytes=None, + metadata=None, + content_type=ICO_CONTENT_TYPE, +): assert response.status_code == http_client.OK json_response = response.json() - assert json_response[u'bucket'] == utils.BUCKET_NAME - assert json_response[u'contentType'] == content_type + assert json_response[u"bucket"] == utils.BUCKET_NAME + assert json_response[u"contentType"] == content_type if actual_contents is not None: - md5_hash = json_response[u'md5Hash'].encode(u'ascii') + md5_hash = json_response[u"md5Hash"].encode(u"ascii") assert md5_hash == get_md5(actual_contents) total_bytes = len(actual_contents) - assert json_response[u'metageneration'] == u'1' - assert json_response[u'name'] == blob_name - assert json_response[u'size'] == u'{:d}'.format(total_bytes) - assert json_response[u'storageClass'] == u'STANDARD' + assert json_response[u"metageneration"] == u"1" + assert json_response[u"name"] == blob_name + assert json_response[u"size"] == u"{:d}".format(total_bytes) + assert json_response[u"storageClass"] == u"STANDARD" if metadata is None: - assert u'metadata' not in json_response + assert u"metadata" not in json_response else: - assert json_response[u'metadata'] == metadata + assert json_response[u"metadata"] == metadata def check_content(blob_name, expected_content, transport, headers=None): @@ -91,8 +97,7 @@ def check_content(blob_name, expected_content, transport, headers=None): def check_tombstoned(upload, transport, *args): assert upload.finished - basic_types = ( - resumable_requests.SimpleUpload, resumable_requests.MultipartUpload) + basic_types = (resumable_requests.SimpleUpload, resumable_requests.MultipartUpload) if isinstance(upload, basic_types): with 
pytest.raises(ValueError): upload.transmit(transport, *args) @@ -109,7 +114,7 @@ def check_does_not_exist(transport, blob_name): def test_simple_upload(authorized_transport, bucket, cleanup): - with open(ICO_FILE, u'rb') as file_obj: + with open(ICO_FILE, u"rb") as file_obj: actual_contents = file_obj.read() blob_name = os.path.basename(ICO_FILE) @@ -121,18 +126,16 @@ def test_simple_upload(authorized_transport, bucket, cleanup): upload_url = utils.SIMPLE_UPLOAD_TEMPLATE.format(blob_name=blob_name) upload = resumable_requests.SimpleUpload(upload_url) # Transmit the resource. - response = upload.transmit( - authorized_transport, actual_contents, ICO_CONTENT_TYPE) + response = upload.transmit(authorized_transport, actual_contents, ICO_CONTENT_TYPE) check_response(response, blob_name, actual_contents=actual_contents) # Download the content to make sure it's "working as expected". check_content(blob_name, actual_contents, authorized_transport) # Make sure the upload is tombstoned. - check_tombstoned( - upload, authorized_transport, actual_contents, ICO_CONTENT_TYPE) + check_tombstoned(upload, authorized_transport, actual_contents, ICO_CONTENT_TYPE) def test_simple_upload_with_headers(authorized_transport, bucket, cleanup): - blob_name = u'some-stuff.bin' + blob_name = u"some-stuff.bin" # Make sure to clean up the uploaded blob when we are done. cleanup(blob_name, authorized_transport) check_does_not_exist(authorized_transport, blob_name) @@ -142,21 +145,19 @@ def test_simple_upload_with_headers(authorized_transport, bucket, cleanup): headers = utils.get_encryption_headers() upload = resumable_requests.SimpleUpload(upload_url, headers=headers) # Transmit the resource. - data = b'Binary contents\x00\x01\x02.' + data = b"Binary contents\x00\x01\x02." 
response = upload.transmit(authorized_transport, data, BYTES_CONTENT_TYPE) check_response( - response, blob_name, actual_contents=data, - content_type=BYTES_CONTENT_TYPE) + response, blob_name, actual_contents=data, content_type=BYTES_CONTENT_TYPE + ) # Download the content to make sure it's "working as expected". - check_content( - blob_name, data, authorized_transport, headers=headers) + check_content(blob_name, data, authorized_transport, headers=headers) # Make sure the upload is tombstoned. - check_tombstoned( - upload, authorized_transport, data, BYTES_CONTENT_TYPE) + check_tombstoned(upload, authorized_transport, data, BYTES_CONTENT_TYPE) def test_multipart_upload(authorized_transport, bucket, cleanup): - with open(ICO_FILE, u'rb') as file_obj: + with open(ICO_FILE, u"rb") as file_obj: actual_contents = file_obj.read() blob_name = os.path.basename(ICO_FILE) @@ -168,25 +169,26 @@ def test_multipart_upload(authorized_transport, bucket, cleanup): upload_url = utils.MULTIPART_UPLOAD upload = resumable_requests.MultipartUpload(upload_url) # Transmit the resource. - metadata = { - u'name': blob_name, - u'metadata': {u'color': u'yellow'}, - } + metadata = {u"name": blob_name, u"metadata": {u"color": u"yellow"}} response = upload.transmit( - authorized_transport, actual_contents, metadata, ICO_CONTENT_TYPE) + authorized_transport, actual_contents, metadata, ICO_CONTENT_TYPE + ) check_response( - response, blob_name, actual_contents=actual_contents, - metadata=metadata[u'metadata']) + response, + blob_name, + actual_contents=actual_contents, + metadata=metadata[u"metadata"], + ) # Download the content to make sure it's "working as expected". check_content(blob_name, actual_contents, authorized_transport) # Make sure the upload is tombstoned. 
check_tombstoned( - upload, authorized_transport, actual_contents, - metadata, ICO_CONTENT_TYPE) + upload, authorized_transport, actual_contents, metadata, ICO_CONTENT_TYPE + ) def test_multipart_upload_with_headers(authorized_transport, bucket, cleanup): - blob_name = u'some-multipart-stuff.bin' + blob_name = u"some-multipart-stuff.bin" # Make sure to clean up the uploaded blob when we are done. cleanup(blob_name, authorized_transport) check_does_not_exist(authorized_transport, blob_name) @@ -196,19 +198,16 @@ def test_multipart_upload_with_headers(authorized_transport, bucket, cleanup): headers = utils.get_encryption_headers() upload = resumable_requests.MultipartUpload(upload_url, headers=headers) # Transmit the resource. - metadata = {u'name': blob_name} - data = b'Other binary contents\x03\x04\x05.' - response = upload.transmit( - authorized_transport, data, metadata, BYTES_CONTENT_TYPE) + metadata = {u"name": blob_name} + data = b"Other binary contents\x03\x04\x05." + response = upload.transmit(authorized_transport, data, metadata, BYTES_CONTENT_TYPE) check_response( - response, blob_name, actual_contents=data, - content_type=BYTES_CONTENT_TYPE) + response, blob_name, actual_contents=data, content_type=BYTES_CONTENT_TYPE + ) # Download the content to make sure it's "working as expected". - check_content( - blob_name, data, authorized_transport, headers=headers) + check_content(blob_name, data, authorized_transport, headers=headers) # Make sure the upload is tombstoned. - check_tombstoned( - upload, authorized_transport, data, metadata, BYTES_CONTENT_TYPE) + check_tombstoned(upload, authorized_transport, data, metadata, BYTES_CONTENT_TYPE) @pytest.fixture @@ -218,7 +217,7 @@ def img_stream(): This is so that an entire test can execute in the context of the context manager without worrying about closing the file. 
""" - with open(IMAGE_FILE, u'rb') as file_obj: + with open(IMAGE_FILE, u"rb") as file_obj: yield file_obj @@ -226,7 +225,7 @@ def get_upload_id(upload_url): parse_result = urllib_parse.urlparse(upload_url) parsed_query = urllib_parse.parse_qs(parse_result.query) # NOTE: We are unpacking here, so asserting exactly one match. - upload_id, = parsed_query[u'upload_id'] + upload_id, = parsed_query[u"upload_id"] return upload_id @@ -237,16 +236,21 @@ def get_num_chunks(total_bytes, chunk_size): return expected_chunks -def transmit_chunks(upload, transport, blob_name, metadata, - num_chunks=0, content_type=JPEG_CONTENT_TYPE): +def transmit_chunks( + upload, transport, blob_name, metadata, num_chunks=0, content_type=JPEG_CONTENT_TYPE +): while not upload.finished: num_chunks += 1 response = upload.transmit_next_chunk(transport) if upload.finished: assert upload.bytes_uploaded == upload.total_bytes check_response( - response, blob_name, total_bytes=upload.total_bytes, - metadata=metadata, content_type=content_type) + response, + blob_name, + total_bytes=upload.total_bytes, + metadata=metadata, + content_type=content_type, + ) else: assert upload.bytes_uploaded == num_chunks * upload.chunk_size assert response.status_code == resumable_media.PERMANENT_REDIRECT @@ -256,20 +260,18 @@ def transmit_chunks(upload, transport, blob_name, metadata, def check_initiate(response, upload, stream, transport, metadata): assert response.status_code == http_client.OK - assert response.content == b'' + assert response.content == b"" upload_id = get_upload_id(upload.resumable_url) - assert response.headers[u'x-guploader-uploadid'] == upload_id + assert response.headers[u"x-guploader-uploadid"] == upload_id assert stream.tell() == 0 # Make sure the upload cannot be re-initiated. 
with pytest.raises(ValueError) as exc_info: - upload.initiate( - transport, stream, metadata, JPEG_CONTENT_TYPE) + upload.initiate(transport, stream, metadata, JPEG_CONTENT_TYPE) - exc_info.match(u'This upload has already been initiated.') + exc_info.match(u"This upload has already been initiated.") -def _resumable_upload_helper(authorized_transport, stream, cleanup, - headers=None): +def _resumable_upload_helper(authorized_transport, stream, cleanup, headers=None): blob_name = os.path.basename(stream.name) # Make sure to clean up the uploaded blob when we are done. cleanup(blob_name, authorized_transport) @@ -277,25 +279,24 @@ def _resumable_upload_helper(authorized_transport, stream, cleanup, # Create the actual upload object. chunk_size = resumable_media.UPLOAD_CHUNK_SIZE upload = resumable_requests.ResumableUpload( - utils.RESUMABLE_UPLOAD, chunk_size, headers=headers) + utils.RESUMABLE_UPLOAD, chunk_size, headers=headers + ) # Initiate the upload. - metadata = { - u'name': blob_name, - u'metadata': {u'direction': u'north'}, - } + metadata = {u"name": blob_name, u"metadata": {u"direction": u"north"}} response = upload.initiate( - authorized_transport, stream, metadata, JPEG_CONTENT_TYPE) + authorized_transport, stream, metadata, JPEG_CONTENT_TYPE + ) # Make sure ``initiate`` succeeded and did not mangle the stream. check_initiate(response, upload, stream, authorized_transport, metadata) # Actually upload the file in chunks. num_chunks = transmit_chunks( - upload, authorized_transport, blob_name, metadata[u'metadata']) + upload, authorized_transport, blob_name, metadata[u"metadata"] + ) assert num_chunks == get_num_chunks(upload.total_bytes, chunk_size) # Download the content to make sure it's "working as expected". 
stream.seek(0) actual_contents = stream.read() - check_content( - blob_name, actual_contents, authorized_transport, headers=headers) + check_content(blob_name, actual_contents, authorized_transport, headers=headers) # Make sure the upload is tombstoned. check_tombstoned(upload, authorized_transport) @@ -305,10 +306,10 @@ def test_resumable_upload(authorized_transport, img_stream, bucket, cleanup): def test_resumable_upload_with_headers( - authorized_transport, img_stream, bucket, cleanup): + authorized_transport, img_stream, bucket, cleanup +): headers = utils.get_encryption_headers() - _resumable_upload_helper( - authorized_transport, img_stream, cleanup, headers=headers) + _resumable_upload_helper(authorized_transport, img_stream, cleanup, headers=headers) def check_bad_chunk(upload, transport): @@ -324,18 +325,19 @@ def test_resumable_upload_bad_chunk_size(authorized_transport, img_stream): blob_name = os.path.basename(img_stream.name) # Create the actual upload object. upload = resumable_requests.ResumableUpload( - utils.RESUMABLE_UPLOAD, resumable_media.UPLOAD_CHUNK_SIZE) + utils.RESUMABLE_UPLOAD, resumable_media.UPLOAD_CHUNK_SIZE + ) # Modify the ``upload`` **after** construction so we can # use a bad chunk size. upload._chunk_size = 1024 assert upload._chunk_size < resumable_media.UPLOAD_CHUNK_SIZE # Initiate the upload. - metadata = {u'name': blob_name} + metadata = {u"name": blob_name} response = upload.initiate( - authorized_transport, img_stream, metadata, JPEG_CONTENT_TYPE) + authorized_transport, img_stream, metadata, JPEG_CONTENT_TYPE + ) # Make sure ``initiate`` succeeded and did not mangle the stream. - check_initiate( - response, upload, img_stream, authorized_transport, metadata) + check_initiate(response, upload, img_stream, authorized_transport, metadata) # Make the first request and verify that it fails. 
check_bad_chunk(upload, authorized_transport) # Reset the chunk size (and the stream) and verify the "resumable" @@ -361,22 +363,23 @@ def sabotage_and_recover(upload, stream, transport, chunk_size): assert stream.tell() == chunk_size -def _resumable_upload_recover_helper(authorized_transport, cleanup, - headers=None): - blob_name = u'some-bytes.bin' +def _resumable_upload_recover_helper(authorized_transport, cleanup, headers=None): + blob_name = u"some-bytes.bin" chunk_size = resumable_media.UPLOAD_CHUNK_SIZE - data = b'123' * chunk_size # 3 chunks worth. + data = b"123" * chunk_size # 3 chunks worth. # Make sure to clean up the uploaded blob when we are done. cleanup(blob_name, authorized_transport) check_does_not_exist(authorized_transport, blob_name) # Create the actual upload object. upload = resumable_requests.ResumableUpload( - utils.RESUMABLE_UPLOAD, chunk_size, headers=headers) + utils.RESUMABLE_UPLOAD, chunk_size, headers=headers + ) # Initiate the upload. - metadata = {u'name': blob_name} + metadata = {u"name": blob_name} stream = io.BytesIO(data) response = upload.initiate( - authorized_transport, stream, metadata, BYTES_CONTENT_TYPE) + authorized_transport, stream, metadata, BYTES_CONTENT_TYPE + ) # Make sure ``initiate`` succeeded and did not mangle the stream. check_initiate(response, upload, stream, authorized_transport, metadata) # Make the first request. @@ -386,13 +389,17 @@ def _resumable_upload_recover_helper(authorized_transport, cleanup, sabotage_and_recover(upload, stream, authorized_transport, chunk_size) # Now stream what remains. num_chunks = transmit_chunks( - upload, authorized_transport, blob_name, None, - num_chunks=1, content_type=BYTES_CONTENT_TYPE) + upload, + authorized_transport, + blob_name, + None, + num_chunks=1, + content_type=BYTES_CONTENT_TYPE, + ) assert num_chunks == 3 # Download the content to make sure it's "working as expected". 
actual_contents = stream.getvalue() - check_content( - blob_name, actual_contents, authorized_transport, headers=headers) + check_content(blob_name, actual_contents, authorized_transport, headers=headers) # Make sure the upload is tombstoned. check_tombstoned(upload, authorized_transport) @@ -401,30 +408,26 @@ def test_resumable_upload_recover(authorized_transport, bucket, cleanup): _resumable_upload_recover_helper(authorized_transport, cleanup) -def test_resumable_upload_recover_with_headers( - authorized_transport, bucket, cleanup): +def test_resumable_upload_recover_with_headers(authorized_transport, bucket, cleanup): headers = utils.get_encryption_headers() - _resumable_upload_recover_helper( - authorized_transport, cleanup, headers=headers) + _resumable_upload_recover_helper(authorized_transport, cleanup, headers=headers) class TestResumableUploadUnknownSize(object): - @staticmethod def _check_range_sent(response, start, end, total): headers_sent = response.request.headers if start is None and end is None: - expected_content_range = u'bytes */{:d}'.format(total) + expected_content_range = u"bytes */{:d}".format(total) else: # Allow total to be an int or a string "*" - expected_content_range = u'bytes {:d}-{:d}/{}'.format( - start, end, total) + expected_content_range = u"bytes {:d}-{:d}/{}".format(start, end, total) - assert headers_sent[u'content-range'] == expected_content_range + assert headers_sent[u"content-range"] == expected_content_range @staticmethod def _check_range_received(response, size): - assert response.headers[u'range'] == u'bytes=0-{:d}'.format(size - 1) + assert response.headers[u"range"] == u"bytes=0-{:d}".format(size - 1) def _check_partial(self, upload, response, chunk_size, num_chunks): start_byte = (num_chunks - 1) * chunk_size @@ -433,13 +436,12 @@ def _check_partial(self, upload, response, chunk_size, num_chunks): assert not upload.finished assert upload.bytes_uploaded == end_byte + 1 assert response.status_code == 
resumable_media.PERMANENT_REDIRECT - assert response.content == b'' + assert response.content == b"" - self._check_range_sent(response, start_byte, end_byte, u'*') + self._check_range_sent(response, start_byte, end_byte, u"*") self._check_range_received(response, end_byte + 1) - def test_smaller_than_chunk_size( - self, authorized_transport, bucket, cleanup): + def test_smaller_than_chunk_size(self, authorized_transport, bucket, cleanup): blob_name = os.path.basename(ICO_FILE) chunk_size = resumable_media.UPLOAD_CHUNK_SIZE # Make sure to clean up the uploaded blob when we are done. @@ -449,17 +451,19 @@ def test_smaller_than_chunk_size( total_bytes = os.path.getsize(ICO_FILE) assert total_bytes < chunk_size # Create the actual upload object. - upload = resumable_requests.ResumableUpload( - utils.RESUMABLE_UPLOAD, chunk_size) + upload = resumable_requests.ResumableUpload(utils.RESUMABLE_UPLOAD, chunk_size) # Initiate the upload. - metadata = {u'name': blob_name} - with open(ICO_FILE, u'rb') as stream: + metadata = {u"name": blob_name} + with open(ICO_FILE, u"rb") as stream: response = upload.initiate( - authorized_transport, stream, metadata, ICO_CONTENT_TYPE, - stream_final=False) + authorized_transport, + stream, + metadata, + ICO_CONTENT_TYPE, + stream_final=False, + ) # Make sure ``initiate`` succeeded and did not mangle the stream. - check_initiate( - response, upload, stream, authorized_transport, metadata) + check_initiate(response, upload, stream, authorized_transport, metadata) # Make sure total bytes was never set. assert upload.total_bytes is None # Make the **ONLY** request. @@ -469,32 +473,33 @@ def test_smaller_than_chunk_size( # Download the content to make sure it's "working as expected". stream.seek(0) actual_contents = stream.read() - check_content( - blob_name, actual_contents, authorized_transport) + check_content(blob_name, actual_contents, authorized_transport) # Make sure the upload is tombstoned. 
check_tombstoned(upload, authorized_transport) def test_finish_at_chunk(self, authorized_transport, bucket, cleanup): - blob_name = u'some-clean-stuff.bin' + blob_name = u"some-clean-stuff.bin" chunk_size = resumable_media.UPLOAD_CHUNK_SIZE # Make sure to clean up the uploaded blob when we are done. cleanup(blob_name, authorized_transport) check_does_not_exist(authorized_transport, blob_name) # Make sure the blob size is an exact multiple of the chunk size. - data = b'ab' * chunk_size + data = b"ab" * chunk_size total_bytes = len(data) stream = io.BytesIO(data) # Create the actual upload object. - upload = resumable_requests.ResumableUpload( - utils.RESUMABLE_UPLOAD, chunk_size) + upload = resumable_requests.ResumableUpload(utils.RESUMABLE_UPLOAD, chunk_size) # Initiate the upload. - metadata = {u'name': blob_name} + metadata = {u"name": blob_name} response = upload.initiate( - authorized_transport, stream, metadata, BYTES_CONTENT_TYPE, - stream_final=False) + authorized_transport, + stream, + metadata, + BYTES_CONTENT_TYPE, + stream_final=False, + ) # Make sure ``initiate`` succeeded and did not mangle the stream. - check_initiate( - response, upload, stream, authorized_transport, metadata) + check_initiate(response, upload, stream, authorized_transport, metadata) # Make sure total bytes was never set. assert upload.total_bytes is None # Make three requests. @@ -509,8 +514,12 @@ def test_finish_at_chunk(self, authorized_transport, bucket, cleanup): # Verify the "clean-up" request. 
assert upload.bytes_uploaded == 2 * chunk_size check_response( - response2, blob_name, actual_contents=data, - total_bytes=total_bytes, content_type=BYTES_CONTENT_TYPE) + response2, + blob_name, + actual_contents=data, + total_bytes=total_bytes, + content_type=BYTES_CONTENT_TYPE, + ) self._check_range_sent(response2, None, None, 2 * chunk_size) @staticmethod @@ -521,43 +530,48 @@ def _add_bytes(stream, data): stream.seek(curr_pos) def test_interleave_writes(self, authorized_transport, bucket, cleanup): - blob_name = u'some-moar-stuff.bin' + blob_name = u"some-moar-stuff.bin" chunk_size = resumable_media.UPLOAD_CHUNK_SIZE # Make sure to clean up the uploaded blob when we are done. cleanup(blob_name, authorized_transport) check_does_not_exist(authorized_transport, blob_name) # Start out the blob as a single chunk (but we will add to it). - stream = io.BytesIO(b'Z' * chunk_size) + stream = io.BytesIO(b"Z" * chunk_size) # Create the actual upload object. - upload = resumable_requests.ResumableUpload( - utils.RESUMABLE_UPLOAD, chunk_size) + upload = resumable_requests.ResumableUpload(utils.RESUMABLE_UPLOAD, chunk_size) # Initiate the upload. - metadata = {u'name': blob_name} + metadata = {u"name": blob_name} response = upload.initiate( - authorized_transport, stream, metadata, BYTES_CONTENT_TYPE, - stream_final=False) + authorized_transport, + stream, + metadata, + BYTES_CONTENT_TYPE, + stream_final=False, + ) # Make sure ``initiate`` succeeded and did not mangle the stream. - check_initiate( - response, upload, stream, authorized_transport, metadata) + check_initiate(response, upload, stream, authorized_transport, metadata) # Make sure total bytes was never set. assert upload.total_bytes is None # Make three requests. response0 = upload.transmit_next_chunk(authorized_transport) self._check_partial(upload, response0, chunk_size, 1) # Add another chunk before sending. 
- self._add_bytes(stream, b'K' * chunk_size) + self._add_bytes(stream, b"K" * chunk_size) response1 = upload.transmit_next_chunk(authorized_transport) self._check_partial(upload, response1, chunk_size, 2) # Add more bytes, but make sure less than a full chunk. last_chunk = 155 - self._add_bytes(stream, b'r' * last_chunk) + self._add_bytes(stream, b"r" * last_chunk) response2 = upload.transmit_next_chunk(authorized_transport) assert upload.finished # Verify the "clean-up" request. total_bytes = 2 * chunk_size + last_chunk assert upload.bytes_uploaded == total_bytes check_response( - response2, blob_name, actual_contents=stream.getvalue(), - total_bytes=total_bytes, content_type=BYTES_CONTENT_TYPE) - self._check_range_sent( - response2, 2 * chunk_size, total_bytes - 1, total_bytes) + response2, + blob_name, + actual_contents=stream.getvalue(), + total_bytes=total_bytes, + content_type=BYTES_CONTENT_TYPE, + ) + self._check_range_sent(response2, 2 * chunk_size, total_bytes - 1, total_bytes) diff --git a/tests/system/utils.py b/tests/system/utils.py index dc4eb7a4..25ee5378 100644 --- a/tests/system/utils.py +++ b/tests/system/utils.py @@ -17,27 +17,31 @@ import time -BUCKET_NAME = u'grpm-systest-{}'.format(int(1000 * time.time())) -BUCKET_POST_URL = u'https://www.googleapis.com/storage/v1/b/' -BUCKET_URL = u'https://www.googleapis.com/storage/v1/b/{}'.format(BUCKET_NAME) +BUCKET_NAME = u"grpm-systest-{}".format(int(1000 * time.time())) +BUCKET_POST_URL = u"https://www.googleapis.com/storage/v1/b/" +BUCKET_URL = u"https://www.googleapis.com/storage/v1/b/{}".format(BUCKET_NAME) -_DOWNLOAD_BASE = u'https://www.googleapis.com/download/storage/v1/b/{}'.format( - BUCKET_NAME) -DOWNLOAD_URL_TEMPLATE = _DOWNLOAD_BASE + u'/o/{blob_name}?alt=media' +_DOWNLOAD_BASE = u"https://www.googleapis.com/download/storage/v1/b/{}".format( + BUCKET_NAME +) +DOWNLOAD_URL_TEMPLATE = _DOWNLOAD_BASE + u"/o/{blob_name}?alt=media" -_UPLOAD_BASE = 
u'https://www.googleapis.com/upload/storage/v1/b/{}'.format( - BUCKET_NAME) + u'/o?uploadType=' -SIMPLE_UPLOAD_TEMPLATE = _UPLOAD_BASE + u'media&name={blob_name}' -MULTIPART_UPLOAD = _UPLOAD_BASE + u'multipart' -RESUMABLE_UPLOAD = _UPLOAD_BASE + u'resumable' +_UPLOAD_BASE = ( + u"https://www.googleapis.com/upload/storage/v1/b/{}".format(BUCKET_NAME) + + u"/o?uploadType=" +) +SIMPLE_UPLOAD_TEMPLATE = _UPLOAD_BASE + u"media&name={blob_name}" +MULTIPART_UPLOAD = _UPLOAD_BASE + u"multipart" +RESUMABLE_UPLOAD = _UPLOAD_BASE + u"resumable" -METADATA_URL_TEMPLATE = BUCKET_URL + u'/o/{blob_name}' +METADATA_URL_TEMPLATE = BUCKET_URL + u"/o/{blob_name}" -GCS_RW_SCOPE = u'https://www.googleapis.com/auth/devstorage.read_write' +GCS_RW_SCOPE = u"https://www.googleapis.com/auth/devstorage.read_write" # Generated using random.choice() with all 256 byte choices. ENCRYPTION_KEY = ( - b'R\xb8\x1b\x94T\xea_\xa8\x93\xae\xd1\xf6\xfca\x15\x0ekA' - b'\x08 Y\x13\xe2\n\x02i\xadc\xe2\xd99x') + b"R\xb8\x1b\x94T\xea_\xa8\x93\xae\xd1\xf6\xfca\x15\x0ekA" + b"\x08 Y\x13\xe2\n\x02i\xadc\xe2\xd99x" +) def get_encryption_headers(key=ENCRYPTION_KEY): @@ -59,7 +63,7 @@ def get_encryption_headers(key=ENCRYPTION_KEY): key_b64 = base64.b64encode(key) return { - u'x-goog-encryption-algorithm': u'AES256', - u'x-goog-encryption-key': key_b64.decode(u'utf-8'), - u'x-goog-encryption-key-sha256': key_hash_b64.decode(u'utf-8'), + u"x-goog-encryption-algorithm": u"AES256", + u"x-goog-encryption-key": key_b64.decode(u"utf-8"), + u"x-goog-encryption-key-sha256": key_hash_b64.decode(u"utf-8"), } diff --git a/tests/unit/requests/test__helpers.py b/tests/unit/requests/test__helpers.py index 4b3a2dbe..81d6a50e 100644 --- a/tests/unit/requests/test__helpers.py +++ b/tests/unit/requests/test__helpers.py @@ -21,68 +21,80 @@ class TestRequestsMixin(object): - def test__get_status_code(self): status_code = int(http_client.OK) response = _make_response(status_code) assert status_code == 
_helpers.RequestsMixin._get_status_code(response) def test__get_headers(self): - headers = {u'fruit': u'apple'} - response = mock.Mock(headers=headers, spec=['headers']) + headers = {u"fruit": u"apple"} + response = mock.Mock(headers=headers, spec=["headers"]) assert headers == _helpers.RequestsMixin._get_headers(response) def test__get_body_wo_content_consumed(self): - body = b'This is the payload.' - raw = mock.Mock(spec=['stream']) + body = b"This is the payload." + raw = mock.Mock(spec=["stream"]) raw.stream.return_value = iter([body]) - response = mock.Mock(raw=raw, _content=False, spec=['raw', '_content']) + response = mock.Mock(raw=raw, _content=False, spec=["raw", "_content"]) assert body == _helpers.RequestsMixin._get_body(response) raw.stream.assert_called_once_with( - _helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False) + _helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False + ) def test__get_body_w_content_consumed(self): - body = b'This is the payload.' - response = mock.Mock(_content=body, spec=['_content']) + body = b"This is the payload." 
+ response = mock.Mock(_content=body, spec=["_content"]) assert body == _helpers.RequestsMixin._get_body(response) def test_http_request(): transport, responses = _make_transport(http_client.OK) - method = u'POST' - url = u'http://test.invalid' + method = u"POST" + url = u"http://test.invalid" data = mock.sentinel.data - headers = {u'one': u'fish', u'blue': u'fish'} + headers = {u"one": u"fish", u"blue": u"fish"} timeout = mock.sentinel.timeout ret_val = _helpers.http_request( - transport, method, url, data=data, headers=headers, - extra1=b'work', extra2=125.5, timeout=timeout) + transport, + method, + url, + data=data, + headers=headers, + extra1=b"work", + extra2=125.5, + timeout=timeout, + ) assert ret_val is responses[0] transport.request.assert_called_once_with( - method, url, data=data, headers=headers, - extra1=b'work', extra2=125.5, timeout=timeout) + method, + url, + data=data, + headers=headers, + extra1=b"work", + extra2=125.5, + timeout=timeout, + ) def test_http_request_defaults(): transport, responses = _make_transport(http_client.OK) - method = u'POST' - url = u'http://test.invalid' + method = u"POST" + url = u"http://test.invalid" ret_val = _helpers.http_request(transport, method, url) assert ret_val is responses[0] transport.request.assert_called_once_with( - method, url, data=None, headers=None, - timeout=EXPECTED_TIMEOUT) + method, url, data=None, headers=None, timeout=EXPECTED_TIMEOUT + ) def _make_response(status_code): - return mock.Mock(status_code=status_code, spec=[u'status_code']) + return mock.Mock(status_code=status_code, spec=[u"status_code"]) def _make_transport(*status_codes): - transport = mock.Mock(spec=[u'request']) - responses = [ - _make_response(status_code) for status_code in status_codes] + transport = mock.Mock(spec=[u"request"]) + responses = [_make_response(status_code) for status_code in status_codes] transport.request.side_effect = responses return transport, responses diff --git a/tests/unit/requests/test_download.py 
b/tests/unit/requests/test_download.py index 41411dcb..54a14c28 100644 --- a/tests/unit/requests/test_download.py +++ b/tests/unit/requests/test_download.py @@ -24,19 +24,19 @@ EXAMPLE_URL = ( - u'https://www.googleapis.com/download/storage/v1/b/' - u'{BUCKET}/o/{OBJECT}?alt=media') + u"https://www.googleapis.com/download/storage/v1/b/" + u"{BUCKET}/o/{OBJECT}?alt=media" +) EXPECTED_TIMEOUT = (61, 60) class TestDownload(object): - - @mock.patch('google.resumable_media.requests.download._LOGGER') + @mock.patch("google.resumable_media.requests.download._LOGGER") def test__get_expected_md5_present(self, _LOGGER): download = download_mod.Download(EXAMPLE_URL) - checksum = u'b2twdXNodGhpc2J1dHRvbg==' - header_value = u'crc32c=3q2+7w==,md5={}'.format(checksum) + checksum = u"b2twdXNodGhpc2J1dHRvbg==" + header_value = u"crc32c=3q2+7w==,md5={}".format(checksum) headers = {download_mod._HASH_HEADER: header_value} response = _mock_response(headers=headers) @@ -44,7 +44,7 @@ def test__get_expected_md5_present(self, _LOGGER): assert expected_md5_hash == checksum _LOGGER.info.assert_not_called() - @mock.patch('google.resumable_media.requests.download._LOGGER') + @mock.patch("google.resumable_media.requests.download._LOGGER") def test__get_expected_md5_missing(self, _LOGGER): download = download_mod.Download(EXAMPLE_URL) @@ -60,8 +60,8 @@ def test__write_to_stream_no_hash_check(self): stream = io.BytesIO() download = download_mod.Download(EXAMPLE_URL, stream=stream) - chunk1 = b'right now, ' - chunk2 = b'but a little later' + chunk1 = b"right now, " + chunk2 = b"but a little later" response = _mock_response(chunks=[chunk1, chunk2], headers={}) ret_val = download._write_to_stream(response) @@ -73,19 +73,19 @@ def test__write_to_stream_no_hash_check(self): response.__enter__.assert_called_once_with() response.__exit__.assert_called_once_with(None, None, None) response.raw.stream.assert_called_once_with( - _helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False) + 
_helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False + ) def test__write_to_stream_with_hash_check_success(self): stream = io.BytesIO() download = download_mod.Download(EXAMPLE_URL, stream=stream) - chunk1 = b'first chunk, count starting at 0. ' - chunk2 = b'second chunk, or chunk 1, which is better? ' - chunk3 = b'ordinals and numerals and stuff.' - header_value = u'crc32c=qmNCyg==,md5=fPAJHnnoi/+NadyNxT2c2w==' + chunk1 = b"first chunk, count starting at 0. " + chunk2 = b"second chunk, or chunk 1, which is better? " + chunk3 = b"ordinals and numerals and stuff." + header_value = u"crc32c=qmNCyg==,md5=fPAJHnnoi/+NadyNxT2c2w==" headers = {download_mod._HASH_HEADER: header_value} - response = _mock_response( - chunks=[chunk1, chunk2, chunk3], headers=headers) + response = _mock_response(chunks=[chunk1, chunk2, chunk3], headers=headers) ret_val = download._write_to_stream(response) assert ret_val is None @@ -96,20 +96,20 @@ def test__write_to_stream_with_hash_check_success(self): response.__enter__.assert_called_once_with() response.__exit__.assert_called_once_with(None, None, None) response.raw.stream.assert_called_once_with( - _helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False) + _helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False + ) def test__write_to_stream_with_hash_check_fail(self): stream = io.BytesIO() download = download_mod.Download(EXAMPLE_URL, stream=stream) - chunk1 = b'first chunk, count starting at 0. ' - chunk2 = b'second chunk, or chunk 1, which is better? ' - chunk3 = b'ordinals and numerals and stuff.' - bad_checksum = u'd3JvbmcgbiBtYWRlIHVwIQ==' - header_value = u'crc32c=V0FUPw==,md5={}'.format(bad_checksum) + chunk1 = b"first chunk, count starting at 0. " + chunk2 = b"second chunk, or chunk 1, which is better? " + chunk3 = b"ordinals and numerals and stuff." 
+ bad_checksum = u"d3JvbmcgbiBtYWRlIHVwIQ==" + header_value = u"crc32c=V0FUPw==,md5={}".format(bad_checksum) headers = {download_mod._HASH_HEADER: header_value} - response = _mock_response( - chunks=[chunk1, chunk2, chunk3], headers=headers) + response = _mock_response(chunks=[chunk1, chunk2, chunk3], headers=headers) with pytest.raises(common.DataCorruption) as exc_info: download._write_to_stream(response) @@ -119,25 +119,29 @@ def test__write_to_stream_with_hash_check_fail(self): error = exc_info.value assert error.response is response assert len(error.args) == 1 - good_checksum = u'fPAJHnnoi/+NadyNxT2c2w==' + good_checksum = u"fPAJHnnoi/+NadyNxT2c2w==" msg = download_mod._CHECKSUM_MISMATCH.format( - EXAMPLE_URL, bad_checksum, good_checksum) + EXAMPLE_URL, bad_checksum, good_checksum + ) assert error.args[0] == msg # Check mocks. response.__enter__.assert_called_once_with() response.__exit__.assert_called_once_with(None, None, None) response.raw.stream.assert_called_once_with( - _helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False) + _helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False + ) def _consume_helper( - self, stream=None, end=65536, headers=None, chunks=(), - response_headers=None): + self, stream=None, end=65536, headers=None, chunks=(), response_headers=None + ): download = download_mod.Download( - EXAMPLE_URL, stream=stream, end=end, headers=headers) - transport = mock.Mock(spec=['request']) + EXAMPLE_URL, stream=stream, end=end, headers=headers + ) + transport = mock.Mock(spec=["request"]) transport.request.return_value = _mock_response( - chunks=chunks, headers=response_headers) + chunks=chunks, headers=response_headers + ) assert not download.finished ret_val = download.consume(transport) @@ -147,7 +151,7 @@ def _consume_helper( assert stream is not None transport.request.assert_called_once_with( - u'GET', + u"GET", EXAMPLE_URL, data=None, headers=download._headers, @@ -155,8 +159,8 @@ def _consume_helper( timeout=EXPECTED_TIMEOUT, ) - 
range_bytes = u'bytes={:d}-{:d}'.format(0, end) - assert download._headers[u'range'] == range_bytes + range_bytes = u"bytes={:d}-{:d}".format(0, end) + assert download._headers[u"range"] == range_bytes assert download.finished return transport @@ -166,107 +170,107 @@ def test_consume(self): def test_consume_with_stream(self): stream = io.BytesIO() - chunks = (b'up down ', b'charlie ', b'brown') + chunks = (b"up down ", b"charlie ", b"brown") transport = self._consume_helper(stream=stream, chunks=chunks) - assert stream.getvalue() == b''.join(chunks) + assert stream.getvalue() == b"".join(chunks) # Check mocks. response = transport.request.return_value response.__enter__.assert_called_once_with() response.__exit__.assert_called_once_with(None, None, None) response.raw.stream.assert_called_once_with( - _helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False) + _helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False + ) def test_consume_with_stream_hash_check_success(self): stream = io.BytesIO() - chunks = (b'up down ', b'charlie ', b'brown') - header_value = u'md5=JvS1wjMvfbCXgEGeaJJLDQ==' + chunks = (b"up down ", b"charlie ", b"brown") + header_value = u"md5=JvS1wjMvfbCXgEGeaJJLDQ==" headers = {download_mod._HASH_HEADER: header_value} transport = self._consume_helper( - stream=stream, chunks=chunks, response_headers=headers) + stream=stream, chunks=chunks, response_headers=headers + ) - assert stream.getvalue() == b''.join(chunks) + assert stream.getvalue() == b"".join(chunks) # Check mocks. 
response = transport.request.return_value response.__enter__.assert_called_once_with() response.__exit__.assert_called_once_with(None, None, None) response.raw.stream.assert_called_once_with( - _helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False) + _helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False + ) def test_consume_with_stream_hash_check_fail(self): stream = io.BytesIO() - download = download_mod.Download( - EXAMPLE_URL, stream=stream) + download = download_mod.Download(EXAMPLE_URL, stream=stream) - chunks = (b'zero zero', b'niner tango') - bad_checksum = u'anVzdCBub3QgdGhpcyAxLA==' - header_value = u'crc32c=V0FUPw==,md5={}'.format(bad_checksum) + chunks = (b"zero zero", b"niner tango") + bad_checksum = u"anVzdCBub3QgdGhpcyAxLA==" + header_value = u"crc32c=V0FUPw==,md5={}".format(bad_checksum) headers = {download_mod._HASH_HEADER: header_value} - transport = mock.Mock(spec=['request']) - transport.request.return_value = _mock_response( - chunks=chunks, headers=headers) + transport = mock.Mock(spec=["request"]) + transport.request.return_value = _mock_response(chunks=chunks, headers=headers) assert not download.finished with pytest.raises(common.DataCorruption) as exc_info: download.consume(transport) - assert stream.getvalue() == b''.join(chunks) + assert stream.getvalue() == b"".join(chunks) assert download.finished assert download._headers == {} error = exc_info.value assert error.response is transport.request.return_value assert len(error.args) == 1 - good_checksum = u'1A/dxEpys717C6FH7FIWDw==' + good_checksum = u"1A/dxEpys717C6FH7FIWDw==" msg = download_mod._CHECKSUM_MISMATCH.format( - EXAMPLE_URL, bad_checksum, good_checksum) + EXAMPLE_URL, bad_checksum, good_checksum + ) assert error.args[0] == msg # Check mocks. 
transport.request.assert_called_once_with( - u'GET', EXAMPLE_URL, data=None, headers={}, stream=True, - timeout=EXPECTED_TIMEOUT) + u"GET", + EXAMPLE_URL, + data=None, + headers={}, + stream=True, + timeout=EXPECTED_TIMEOUT, + ) def test_consume_with_headers(self): headers = {} # Empty headers end = 16383 self._consume_helper(end=end, headers=headers) - range_bytes = u'bytes={:d}-{:d}'.format(0, end) + range_bytes = u"bytes={:d}-{:d}".format(0, end) # Make sure the headers have been modified. - assert headers == {u'range': range_bytes} + assert headers == {u"range": range_bytes} class TestChunkedDownload(object): - @staticmethod def _response_content_range(start_byte, end_byte, total_bytes): - return u'bytes {:d}-{:d}/{:d}'.format( - start_byte, end_byte, total_bytes) + return u"bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes) def _response_headers(self, start_byte, end_byte, total_bytes): content_length = end_byte - start_byte + 1 - resp_range = self._response_content_range( - start_byte, end_byte, total_bytes) + resp_range = self._response_content_range(start_byte, end_byte, total_bytes) return { - u'content-length': u'{:d}'.format(content_length), - u'content-range': resp_range, + u"content-length": u"{:d}".format(content_length), + u"content-range": resp_range, } - def _mock_response(self, start_byte, end_byte, total_bytes, - content=None, status_code=None): - response_headers = self._response_headers( - start_byte, end_byte, total_bytes) + def _mock_response( + self, start_byte, end_byte, total_bytes, content=None, status_code=None + ): + response_headers = self._response_headers(start_byte, end_byte, total_bytes) return mock.Mock( _content=content, headers=response_headers, status_code=status_code, - spec=[ - u'_content', - u'headers', - u'status_code', - ], + spec=[u"_content", u"headers", u"status_code"], ) def test_consume_next_chunk_already_finished(self): @@ -275,25 +279,29 @@ def test_consume_next_chunk_already_finished(self): with 
pytest.raises(ValueError): download.consume_next_chunk(None) - def _mock_transport(self, start, chunk_size, total_bytes, content=b''): - transport = mock.Mock(spec=['request']) + def _mock_transport(self, start, chunk_size, total_bytes, content=b""): + transport = mock.Mock(spec=["request"]) assert len(content) == chunk_size transport.request.return_value = self._mock_response( - start, start + chunk_size - 1, total_bytes, - content=content, status_code=int(http_client.OK)) + start, + start + chunk_size - 1, + total_bytes, + content=content, + status_code=int(http_client.OK), + ) return transport def test_consume_next_chunk(self): start = 1536 stream = io.BytesIO() - data = b'Just one chunk.' + data = b"Just one chunk." chunk_size = len(data) download = download_mod.ChunkedDownload( - EXAMPLE_URL, chunk_size, stream, start=start) + EXAMPLE_URL, chunk_size, stream, start=start + ) total_bytes = 16384 - transport = self._mock_transport( - start, chunk_size, total_bytes, content=data) + transport = self._mock_transport(start, chunk_size, total_bytes, content=data) # Verify the internal state before consuming a chunk. assert not download.finished @@ -302,10 +310,10 @@ def test_consume_next_chunk(self): # Actually consume the chunk and check the output. 
ret_val = download.consume_next_chunk(transport) assert ret_val is transport.request.return_value - range_bytes = u'bytes={:d}-{:d}'.format(start, start + chunk_size - 1) - download_headers = {u'range': range_bytes} + range_bytes = u"bytes={:d}-{:d}".format(start, start + chunk_size - 1) + download_headers = {u"range": range_bytes} transport.request.assert_called_once_with( - u'GET', + u"GET", EXAMPLE_URL, data=None, headers=download_headers, @@ -321,8 +329,8 @@ def test_consume_next_chunk(self): class Test__parse_md5_header(object): - CRC32C_CHECKSUM = u'3q2+7w==' - MD5_CHECKSUM = u'c2l4dGVlbmJ5dGVzbG9uZw==' + CRC32C_CHECKSUM = u"3q2+7w==" + MD5_CHECKSUM = u"c2l4dGVlbmJ5dGVzbG9uZw==" def test_empty_value(self): header_value = None @@ -331,28 +339,28 @@ def test_empty_value(self): assert md5_header is None def test_crc32c_only(self): - header_value = u'crc32c={}'.format(self.CRC32C_CHECKSUM) + header_value = u"crc32c={}".format(self.CRC32C_CHECKSUM) response = None md5_header = download_mod._parse_md5_header(header_value, response) assert md5_header is None def test_md5_only(self): - header_value = u'md5={}'.format(self.MD5_CHECKSUM) + header_value = u"md5={}".format(self.MD5_CHECKSUM) response = None md5_header = download_mod._parse_md5_header(header_value, response) assert md5_header == self.MD5_CHECKSUM def test_both_crc32c_and_md5(self): - header_value = u'crc32c={},md5={}'.format( - self.CRC32C_CHECKSUM, self.MD5_CHECKSUM) + header_value = u"crc32c={},md5={}".format( + self.CRC32C_CHECKSUM, self.MD5_CHECKSUM + ) response = None md5_header = download_mod._parse_md5_header(header_value, response) assert md5_header == self.MD5_CHECKSUM def test_md5_multiple_matches(self): - another_checksum = u'eW91IGRpZCBXQVQgbm93Pw==' - header_value = u'md5={},md5={}'.format( - self.MD5_CHECKSUM, another_checksum) + another_checksum = u"eW91IGRpZCBXQVQgbm93Pw==" + header_value = u"md5={},md5={}".format(self.MD5_CHECKSUM, another_checksum) response = mock.sentinel.response with 
pytest.raises(common.InvalidResponse) as exc_info: @@ -367,7 +375,7 @@ def test_md5_multiple_matches(self): def test__DoNothingHash(): do_nothing_hash = download_mod._DoNothingHash() - return_value = do_nothing_hash.update(b'some data') + return_value = do_nothing_hash.update(b"some data") assert return_value is None @@ -376,20 +384,13 @@ def _mock_response(status_code=http_client.OK, chunks=(), headers=None): headers = {} if chunks: - mock_raw = mock.Mock(headers=headers, spec=['headers', 'stream']) + mock_raw = mock.Mock(headers=headers, spec=["headers", "stream"]) mock_raw.stream.return_value = iter(chunks) response = mock.MagicMock( headers=headers, status_code=int(status_code), raw=mock_raw, - spec=[ - '__enter__', - '__exit__', - 'raw', - 'status_code', - 'headers', - 'raw', - ], + spec=["__enter__", "__exit__", "raw", "status_code", "headers", "raw"], ) # i.e. context manager returns ``self``. response.__enter__.return_value = response @@ -399,8 +400,5 @@ def _mock_response(status_code=http_client.OK, chunks=(), headers=None): return mock.Mock( headers=headers, status_code=int(status_code), - spec=[ - 'status_code', - 'headers', - ], + spec=["status_code", "headers"], ) diff --git a/tests/unit/requests/test_upload.py b/tests/unit/requests/test_upload.py index ab6a6431..6017708e 100644 --- a/tests/unit/requests/test_upload.py +++ b/tests/unit/requests/test_upload.py @@ -23,85 +23,94 @@ SIMPLE_URL = ( - u'https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?' - u'uploadType=media&name={OBJECT}') + u"https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?" + u"uploadType=media&name={OBJECT}" +) MULTIPART_URL = ( - u'https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?' - u'uploadType=multipart') + u"https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?" + u"uploadType=multipart" +) RESUMABLE_URL = ( - u'https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?' 
- u'uploadType=resumable') + u"https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?" + u"uploadType=resumable" +) ONE_MB = 1024 * 1024 -BASIC_CONTENT = u'text/plain' -JSON_TYPE = u'application/json; charset=UTF-8' -JSON_TYPE_LINE = b'content-type: application/json; charset=UTF-8\r\n' +BASIC_CONTENT = u"text/plain" +JSON_TYPE = u"application/json; charset=UTF-8" +JSON_TYPE_LINE = b"content-type: application/json; charset=UTF-8\r\n" EXPECTED_TIMEOUT = (61, 60) class TestSimpleUpload(object): - def test_transmit(self): - data = b'I have got a lovely bunch of coconuts.' + data = b"I have got a lovely bunch of coconuts." content_type = BASIC_CONTENT upload = upload_mod.SimpleUpload(SIMPLE_URL) - transport = mock.Mock(spec=[u'request']) + transport = mock.Mock(spec=[u"request"]) transport.request.return_value = _make_response() assert not upload.finished ret_val = upload.transmit(transport, data, content_type) assert ret_val is transport.request.return_value - upload_headers = {u'content-type': content_type} + upload_headers = {u"content-type": content_type} transport.request.assert_called_once_with( - u'POST', SIMPLE_URL, data=data, headers=upload_headers, - timeout=EXPECTED_TIMEOUT) + u"POST", + SIMPLE_URL, + data=data, + headers=upload_headers, + timeout=EXPECTED_TIMEOUT, + ) assert upload.finished class TestMultipartUpload(object): - - @mock.patch(u'google.resumable_media._upload.get_boundary', - return_value=b'==4==') + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==4==") def test_transmit(self, mock_get_boundary): - data = b'Mock data here and there.' - metadata = {u'Hey': u'You', u'Guys': u'90909'} + data = b"Mock data here and there." 
+ metadata = {u"Hey": u"You", u"Guys": u"90909"} content_type = BASIC_CONTENT upload = upload_mod.MultipartUpload(MULTIPART_URL) - transport = mock.Mock(spec=[u'request']) + transport = mock.Mock(spec=[u"request"]) transport.request.return_value = _make_response() assert not upload.finished ret_val = upload.transmit(transport, data, metadata, content_type) assert ret_val is transport.request.return_value expected_payload = ( - b'--==4==\r\n' + - JSON_TYPE_LINE + - b'\r\n' + - json.dumps(metadata).encode(u'utf-8') + b'\r\n' + - b'--==4==\r\n' - b'content-type: text/plain\r\n' - b'\r\n' - b'Mock data here and there.\r\n' - b'--==4==--') + b"--==4==\r\n" + + JSON_TYPE_LINE + + b"\r\n" + + json.dumps(metadata).encode(u"utf-8") + + b"\r\n" + + b"--==4==\r\n" + b"content-type: text/plain\r\n" + b"\r\n" + b"Mock data here and there.\r\n" + b"--==4==--" + ) multipart_type = b'multipart/related; boundary="==4=="' - upload_headers = {u'content-type': multipart_type} + upload_headers = {u"content-type": multipart_type} transport.request.assert_called_once_with( - u'POST', MULTIPART_URL, data=expected_payload, - headers=upload_headers, timeout=EXPECTED_TIMEOUT) + u"POST", + MULTIPART_URL, + data=expected_payload, + headers=upload_headers, + timeout=EXPECTED_TIMEOUT, + ) assert upload.finished mock_get_boundary.assert_called_once_with() class TestResumableUpload(object): - def test_initiate(self): upload = upload_mod.ResumableUpload(RESUMABLE_URL, ONE_MB) - data = b'Knock knock who is there' + data = b"Knock knock who is there" stream = io.BytesIO(data) - metadata = {u'name': u'got-jokes.txt'} + metadata = {u"name": u"got-jokes.txt"} - transport = mock.Mock(spec=[u'request']) - location = u'http://test.invalid?upload_id=AACODBBBxuw9u3AA', - response_headers = {u'location': location} + transport = mock.Mock(spec=[u"request"]) + location = (u"http://test.invalid?upload_id=AACODBBBxuw9u3AA",) + response_headers = {u"location": location} post_response = 
_make_response(headers=response_headers) transport.request.return_value = post_response # Check resumable_url before. @@ -110,52 +119,60 @@ def test_initiate(self): total_bytes = 100 assert total_bytes > len(data) response = upload.initiate( - transport, stream, metadata, BASIC_CONTENT, - total_bytes=total_bytes, stream_final=False) + transport, + stream, + metadata, + BASIC_CONTENT, + total_bytes=total_bytes, + stream_final=False, + ) assert response is transport.request.return_value # Check resumable_url after. assert upload._resumable_url == location # Make sure the mock was called as expected. json_bytes = b'{"name": "got-jokes.txt"}' expected_headers = { - u'content-type': JSON_TYPE, - u'x-upload-content-type': BASIC_CONTENT, - u'x-upload-content-length': u'{:d}'.format(total_bytes), + u"content-type": JSON_TYPE, + u"x-upload-content-type": BASIC_CONTENT, + u"x-upload-content-length": u"{:d}".format(total_bytes), } transport.request.assert_called_once_with( - u'POST', RESUMABLE_URL, data=json_bytes, headers=expected_headers, - timeout=EXPECTED_TIMEOUT) + u"POST", + RESUMABLE_URL, + data=json_bytes, + headers=expected_headers, + timeout=EXPECTED_TIMEOUT, + ) @staticmethod def _upload_in_flight(data, headers=None): - upload = upload_mod.ResumableUpload( - RESUMABLE_URL, ONE_MB, headers=headers) + upload = upload_mod.ResumableUpload(RESUMABLE_URL, ONE_MB, headers=headers) upload._stream = io.BytesIO(data) upload._content_type = BASIC_CONTENT upload._total_bytes = len(data) - upload._resumable_url = u'http://test.invalid?upload_id=not-none' + upload._resumable_url = u"http://test.invalid?upload_id=not-none" return upload @staticmethod def _chunk_mock(status_code, response_headers): - transport = mock.Mock(spec=[u'request']) - put_response = _make_response( - status_code=status_code, headers=response_headers) + transport = mock.Mock(spec=[u"request"]) + put_response = _make_response(status_code=status_code, headers=response_headers) transport.request.return_value = 
put_response return transport def test_transmit_next_chunk(self): - data = b'This time the data is official.' + data = b"This time the data is official." upload = self._upload_in_flight(data) # Make a fake chunk size smaller than 256 KB. chunk_size = 10 assert chunk_size < len(data) upload._chunk_size = chunk_size # Make a fake 308 response. - response_headers = {u'range': u'bytes=0-{:d}'.format(chunk_size - 1)} + response_headers = {u"range": u"bytes=0-{:d}".format(chunk_size - 1)} transport = self._chunk_mock( - resumable_media.PERMANENT_REDIRECT, response_headers) + resumable_media.PERMANENT_REDIRECT, response_headers + ) # Check the state before the request. assert upload._bytes_uploaded == 0 @@ -166,25 +183,28 @@ def test_transmit_next_chunk(self): assert upload._bytes_uploaded == chunk_size # Make sure the mock was called as expected. payload = data[:chunk_size] - content_range = u'bytes 0-{:d}/{:d}'.format(chunk_size - 1, len(data)) + content_range = u"bytes 0-{:d}/{:d}".format(chunk_size - 1, len(data)) expected_headers = { - u'content-range': content_range, - u'content-type': BASIC_CONTENT, + u"content-range": content_range, + u"content-type": BASIC_CONTENT, } transport.request.assert_called_once_with( - u'PUT', upload.resumable_url, data=payload, - headers=expected_headers, timeout=EXPECTED_TIMEOUT) + u"PUT", + upload.resumable_url, + data=payload, + headers=expected_headers, + timeout=EXPECTED_TIMEOUT, + ) def test_recover(self): upload = upload_mod.ResumableUpload(RESUMABLE_URL, ONE_MB) upload._invalid = True # Make sure invalid. 
- upload._stream = mock.Mock(spec=[u'seek']) - upload._resumable_url = u'http://test.invalid?upload_id=big-deal' + upload._stream = mock.Mock(spec=[u"seek"]) + upload._resumable_url = u"http://test.invalid?upload_id=big-deal" end = 55555 - headers = {u'range': u'bytes=0-{:d}'.format(end)} - transport = self._chunk_mock( - resumable_media.PERMANENT_REDIRECT, headers) + headers = {u"range": u"bytes=0-{:d}".format(end)} + transport = self._chunk_mock(resumable_media.PERMANENT_REDIRECT, headers) ret_val = upload.recover(transport) assert ret_val is transport.request.return_value @@ -192,14 +212,18 @@ def test_recover(self): assert upload.bytes_uploaded == end + 1 assert not upload.invalid upload._stream.seek.assert_called_once_with(end + 1) - expected_headers = {u'content-range': u'bytes */*'} + expected_headers = {u"content-range": u"bytes */*"} transport.request.assert_called_once_with( - u'PUT', upload.resumable_url, data=None, headers=expected_headers, - timeout=EXPECTED_TIMEOUT) + u"PUT", + upload.resumable_url, + data=None, + headers=expected_headers, + timeout=EXPECTED_TIMEOUT, + ) def _make_response(status_code=http_client.OK, headers=None): headers = headers or {} return mock.Mock( - headers=headers, status_code=status_code, - spec=[u'headers', u'status_code']) + headers=headers, status_code=status_code, spec=[u"headers", u"status_code"] + ) diff --git a/tests/unit/test__download.py b/tests/unit/test__download.py index f7ac008f..bfad49c1 100644 --- a/tests/unit/test__download.py +++ b/tests/unit/test__download.py @@ -23,12 +23,12 @@ EXAMPLE_URL = ( - u'https://www.googleapis.com/download/storage/v1/b/' - u'{BUCKET}/o/{OBJECT}?alt=media') + u"https://www.googleapis.com/download/storage/v1/b/" + u"{BUCKET}/o/{OBJECT}?alt=media" +) class TestDownloadBase(object): - def test_constructor_defaults(self): download = _download.DownloadBase(EXAMPLE_URL) assert download.media_url == EXAMPLE_URL @@ -42,10 +42,14 @@ def test_constructor_defaults(self): def 
test_constructor_explicit(self): start = 11 end = 10001 - headers = {u'foof': u'barf'} + headers = {u"foof": u"barf"} download = _download.DownloadBase( - EXAMPLE_URL, stream=mock.sentinel.stream, - start=start, end=end, headers=headers) + EXAMPLE_URL, + stream=mock.sentinel.stream, + start=start, + end=end, + headers=headers, + ) assert download.media_url == EXAMPLE_URL assert download._stream is mock.sentinel.stream assert download.start == start @@ -71,23 +75,22 @@ def test__get_status_code(self): with pytest.raises(NotImplementedError) as exc_info: _download.DownloadBase._get_status_code(None) - exc_info.match(u'virtual') + exc_info.match(u"virtual") def test__get_headers(self): with pytest.raises(NotImplementedError) as exc_info: _download.DownloadBase._get_headers(None) - exc_info.match(u'virtual') + exc_info.match(u"virtual") def test__get_body(self): with pytest.raises(NotImplementedError) as exc_info: _download.DownloadBase._get_body(None) - exc_info.match(u'virtual') + exc_info.match(u"virtual") class TestDownload(object): - def test__prepare_request_already_finished(self): download = _download.Download(EXAMPLE_URL) download._finished = True @@ -97,28 +100,27 @@ def test__prepare_request_already_finished(self): def test__prepare_request(self): download1 = _download.Download(EXAMPLE_URL) method1, url1, payload1, headers1 = download1._prepare_request() - assert method1 == u'GET' + assert method1 == u"GET" assert url1 == EXAMPLE_URL assert payload1 is None assert headers1 == {} download2 = _download.Download(EXAMPLE_URL, start=53) method2, url2, payload2, headers2 = download2._prepare_request() - assert method2 == u'GET' + assert method2 == u"GET" assert url2 == EXAMPLE_URL assert payload2 is None - assert headers2 == {u'range': u'bytes=53-'} + assert headers2 == {u"range": u"bytes=53-"} def test__prepare_request_with_headers(self): - headers = {u'spoonge': u'borb'} - download = _download.Download( - EXAMPLE_URL, start=11, end=111, headers=headers) + headers 
= {u"spoonge": u"borb"} + download = _download.Download(EXAMPLE_URL, start=11, end=111, headers=headers) method, url, payload, new_headers = download._prepare_request() - assert method == u'GET' + assert method == u"GET" assert url == EXAMPLE_URL assert payload is None assert new_headers is headers - assert headers == {u'range': u'bytes=11-111', u'spoonge': u'borb'} + assert headers == {u"range": u"bytes=11-111", u"spoonge": u"borb"} def test__process_response(self): download = _download.Download(EXAMPLE_URL) @@ -126,8 +128,7 @@ def test__process_response(self): # Make sure **not finished** before. assert not download.finished - response = mock.Mock( - status_code=int(http_client.OK), spec=['status_code']) + response = mock.Mock(status_code=int(http_client.OK), spec=["status_code"]) ret_val = download._process_response(response) assert ret_val is None # Make sure **finished** after. @@ -140,7 +141,8 @@ def test__process_response_bad_status(self): # Make sure **not finished** before. assert not download.finished response = mock.Mock( - status_code=int(http_client.NOT_FOUND), spec=['status_code']) + status_code=int(http_client.NOT_FOUND), spec=["status_code"] + ) with pytest.raises(common.InvalidResponse) as exc_info: download._process_response(response) @@ -158,16 +160,14 @@ def test_consume(self): with pytest.raises(NotImplementedError) as exc_info: download.consume(None) - exc_info.match(u'virtual') + exc_info.match(u"virtual") class TestChunkedDownload(object): - def test_constructor_defaults(self): chunk_size = 256 stream = mock.sentinel.stream - download = _download.ChunkedDownload( - EXAMPLE_URL, chunk_size, stream) + download = _download.ChunkedDownload(EXAMPLE_URL, chunk_size, stream) assert download.media_url == EXAMPLE_URL assert download.chunk_size == chunk_size assert download.start == 0 @@ -222,7 +222,8 @@ def test__get_byte_range_with_end(self): start = 1024 end = 1151 download = _download.ChunkedDownload( - EXAMPLE_URL, chunk_size, None, start=start, 
end=end) + EXAMPLE_URL, chunk_size, None, start=start, end=end + ) curr_start, curr_end = download._get_byte_range() assert curr_start == start assert curr_end == end @@ -244,25 +245,26 @@ def test__get_byte_range_with_total_bytes(self): @staticmethod def _response_content_range(start_byte, end_byte, total_bytes): - return u'bytes {:d}-{:d}/{:d}'.format( - start_byte, end_byte, total_bytes) + return u"bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes) def _response_headers(self, start_byte, end_byte, total_bytes): content_length = end_byte - start_byte + 1 - resp_range = self._response_content_range( - start_byte, end_byte, total_bytes) + resp_range = self._response_content_range(start_byte, end_byte, total_bytes) return { - u'content-length': u'{:d}'.format(content_length), - u'content-range': resp_range, + u"content-length": u"{:d}".format(content_length), + u"content-range": resp_range, } - def _mock_response(self, start_byte, end_byte, total_bytes, - content=None, status_code=None): - response_headers = self._response_headers( - start_byte, end_byte, total_bytes) + def _mock_response( + self, start_byte, end_byte, total_bytes, content=None, status_code=None + ): + response_headers = self._response_headers(start_byte, end_byte, total_bytes) return mock.Mock( - content=content, headers=response_headers, status_code=status_code, - spec=['content', 'headers', 'status_code']) + content=content, + headers=response_headers, + status_code=status_code, + spec=["content", "headers", "status_code"], + ) def test__prepare_request_already_finished(self): download = _download.ChunkedDownload(EXAMPLE_URL, 64, None) @@ -270,7 +272,7 @@ def test__prepare_request_already_finished(self): with pytest.raises(ValueError) as exc_info: download._prepare_request() - assert exc_info.match(u'Download has finished.') + assert exc_info.match(u"Download has finished.") def test__prepare_request_invalid(self): download = _download.ChunkedDownload(EXAMPLE_URL, 64, None) @@ 
-278,37 +280,39 @@ def test__prepare_request_invalid(self): with pytest.raises(ValueError) as exc_info: download._prepare_request() - assert exc_info.match(u'Download is invalid and cannot be re-used.') + assert exc_info.match(u"Download is invalid and cannot be re-used.") def test__prepare_request(self): chunk_size = 2048 download1 = _download.ChunkedDownload(EXAMPLE_URL, chunk_size, None) method1, url1, payload1, headers1 = download1._prepare_request() - assert method1 == u'GET' + assert method1 == u"GET" assert url1 == EXAMPLE_URL assert payload1 is None - assert headers1 == {u'range': u'bytes=0-2047'} + assert headers1 == {u"range": u"bytes=0-2047"} download2 = _download.ChunkedDownload( - EXAMPLE_URL, chunk_size, None, start=19991) + EXAMPLE_URL, chunk_size, None, start=19991 + ) download2._total_bytes = 20101 method2, url2, payload2, headers2 = download2._prepare_request() - assert method2 == u'GET' + assert method2 == u"GET" assert url2 == EXAMPLE_URL assert payload2 is None - assert headers2 == {u'range': u'bytes=19991-20100'} + assert headers2 == {u"range": u"bytes=19991-20100"} def test__prepare_request_with_headers(self): chunk_size = 2048 - headers = {u'patrizio': u'Starf-ish'} + headers = {u"patrizio": u"Starf-ish"} download = _download.ChunkedDownload( - EXAMPLE_URL, chunk_size, None, headers=headers) + EXAMPLE_URL, chunk_size, None, headers=headers + ) method, url, payload, new_headers = download._prepare_request() - assert method == u'GET' + assert method == u"GET" assert url == EXAMPLE_URL assert payload is None assert new_headers is headers - expected = {u'patrizio': u'Starf-ish', u'range': u'bytes=0-2047'} + expected = {u"patrizio": u"Starf-ish", u"range": u"bytes=0-2047"} assert headers == expected def test__make_invalid(self): @@ -318,11 +322,10 @@ def test__make_invalid(self): assert download.invalid def test__process_response(self): - data = b'1234xyztL' * 37 + data = b"1234xyztL" * 37 chunk_size = len(data) stream = io.BytesIO() - download = 
_download.ChunkedDownload( - EXAMPLE_URL, chunk_size, stream) + download = _download.ChunkedDownload(EXAMPLE_URL, chunk_size, stream) _fix_up_virtual(download) already = 22 @@ -335,8 +338,12 @@ def test__process_response(self): assert download.total_bytes is None # Actually call the method to update. response = self._mock_response( - already, already + chunk_size - 1, total_bytes, content=data, - status_code=int(http_client.PARTIAL_CONTENT)) + already, + already + chunk_size - 1, + total_bytes, + content=data, + status_code=int(http_client.PARTIAL_CONTENT), + ) download._process_response(response) # Check internal state after. assert not download.finished @@ -345,11 +352,10 @@ def test__process_response(self): assert stream.getvalue() == data def test__process_response_transfer_encoding(self): - data = b'1234xyztL' * 37 + data = b"1234xyztL" * 37 chunk_size = len(data) stream = io.BytesIO() - download = _download.ChunkedDownload( - EXAMPLE_URL, chunk_size, stream) + download = _download.ChunkedDownload(EXAMPLE_URL, chunk_size, stream) _fix_up_virtual(download) already = 22 @@ -363,10 +369,14 @@ def test__process_response_transfer_encoding(self): assert not download.invalid # Actually call the method to update. response = self._mock_response( - already, already + chunk_size - 1, total_bytes, content=data, - status_code=int(http_client.PARTIAL_CONTENT)) - response.headers[u'transfer-encoding'] = 'chunked' - del response.headers[u'content-length'] + already, + already + chunk_size - 1, + total_bytes, + content=data, + status_code=int(http_client.PARTIAL_CONTENT), + ) + response.headers[u"transfer-encoding"] = "chunked" + del response.headers[u"content-length"] download._process_response(response) # Check internal state after. 
assert not download.finished @@ -376,9 +386,8 @@ def test__process_response_transfer_encoding(self): def test__process_response_bad_status(self): chunk_size = 384 - stream = mock.Mock(spec=['write']) - download = _download.ChunkedDownload( - EXAMPLE_URL, chunk_size, stream) + stream = mock.Mock(spec=["write"]) + download = _download.ChunkedDownload(EXAMPLE_URL, chunk_size, stream) _fix_up_virtual(download) total_bytes = 300 @@ -389,8 +398,8 @@ def test__process_response_bad_status(self): assert download.total_bytes is None # Actually call the method to update. response = self._mock_response( - 0, total_bytes - 1, total_bytes, - status_code=int(http_client.NOT_FOUND)) + 0, total_bytes - 1, total_bytes, status_code=int(http_client.NOT_FOUND) + ) with pytest.raises(common.InvalidResponse) as exc_info: download._process_response(response) @@ -417,14 +426,12 @@ def test__process_response_missing_content_length(self): assert download.total_bytes is None assert not download.invalid # Actually call the method to update. - headers = { - u'content-range': u'bytes 0-99/99', - } + headers = {u"content-range": u"bytes 0-99/99"} response = mock.Mock( headers=headers, status_code=int(http_client.PARTIAL_CONTENT), - content=b'DEADBEEF', - spec=['headers', 'status_code', 'content'], + content=b"DEADBEEF", + spec=["headers", "status_code", "content"], ) with pytest.raises(common.InvalidResponse) as exc_info: download._process_response(response) @@ -432,7 +439,7 @@ def test__process_response_missing_content_length(self): error = exc_info.value assert error.response is response assert len(error.args) == 2 - assert error.args[1] == u'content-length' + assert error.args[1] == u"content-length" # Check internal state after. assert not download.finished assert download.bytes_downloaded == 0 @@ -449,22 +456,24 @@ def test__process_response_bad_content_range(self): assert download.total_bytes is None assert not download.invalid # Actually call the method to update. 
- data = b'stuff' + data = b"stuff" headers = { - u'content-length': u'{:d}'.format(len(data)), - u'content-range': u'kites x-y/58', + u"content-length": u"{:d}".format(len(data)), + u"content-range": u"kites x-y/58", } response = mock.Mock( - content=data, headers=headers, + content=data, + headers=headers, status_code=int(http_client.PARTIAL_CONTENT), - spec=['content', 'headers', 'status_code']) + spec=["content", "headers", "status_code"], + ) with pytest.raises(common.InvalidResponse) as exc_info: download._process_response(response) error = exc_info.value assert error.response is response assert len(error.args) == 3 - assert error.args[1] == headers[u'content-range'] + assert error.args[1] == headers[u"content-range"] # Check internal state after. assert not download.finished assert download.bytes_downloaded == 0 @@ -473,9 +482,8 @@ def test__process_response_bad_content_range(self): def test__process_response_body_wrong_length(self): chunk_size = 10 - stream = mock.Mock(spec=['write']) - download = _download.ChunkedDownload( - EXAMPLE_URL, chunk_size, stream) + stream = mock.Mock(spec=["write"]) + download = _download.ChunkedDownload(EXAMPLE_URL, chunk_size, stream) _fix_up_virtual(download) total_bytes = 100 @@ -485,10 +493,14 @@ def test__process_response_body_wrong_length(self): assert download.bytes_downloaded == 0 assert download.total_bytes is None # Actually call the method to update. 
- data = b'not 10' + data = b"not 10" response = self._mock_response( - 0, chunk_size - 1, total_bytes, content=data, - status_code=int(http_client.PARTIAL_CONTENT)) + 0, + chunk_size - 1, + total_bytes, + content=data, + status_code=int(http_client.PARTIAL_CONTENT), + ) with pytest.raises(common.InvalidResponse) as exc_info: download._process_response(response) @@ -507,8 +519,7 @@ def test__process_response_body_wrong_length(self): def test__process_response_when_finished(self): chunk_size = 256 stream = io.BytesIO() - download = _download.ChunkedDownload( - EXAMPLE_URL, chunk_size, stream) + download = _download.ChunkedDownload(EXAMPLE_URL, chunk_size, stream) _fix_up_virtual(download) total_bytes = 200 @@ -518,10 +529,14 @@ def test__process_response_when_finished(self): assert download.bytes_downloaded == 0 assert download.total_bytes is None # Actually call the method to update. - data = b'abcd' * 50 # 4 * 50 == 200 + data = b"abcd" * 50 # 4 * 50 == 200 response = self._mock_response( - 0, total_bytes - 1, total_bytes, content=data, - status_code=int(http_client.OK)) + 0, + total_bytes - 1, + total_bytes, + content=data, + status_code=int(http_client.OK), + ) download._process_response(response) # Check internal state after. assert download.finished @@ -534,8 +549,7 @@ def test__process_response_when_reaching_end(self): chunk_size = 8192 end = 65000 stream = io.BytesIO() - download = _download.ChunkedDownload( - EXAMPLE_URL, chunk_size, stream, end=end) + download = _download.ChunkedDownload(EXAMPLE_URL, chunk_size, stream, end=end) _fix_up_virtual(download) download._bytes_downloaded = 7 * chunk_size @@ -547,10 +561,14 @@ def test__process_response_when_reaching_end(self): assert download.total_bytes == 8 * chunk_size # Actually call the method to update. 
expected_size = end - 7 * chunk_size + 1 - data = b'B' * expected_size + data = b"B" * expected_size response = self._mock_response( - 7 * chunk_size, end, 8 * chunk_size, content=data, - status_code=int(http_client.PARTIAL_CONTENT)) + 7 * chunk_size, + end, + 8 * chunk_size, + content=data, + status_code=int(http_client.PARTIAL_CONTENT), + ) download._process_response(response) # Check internal state after. assert download.finished @@ -561,17 +579,16 @@ def test__process_response_when_reaching_end(self): def test__process_response_when_content_range_is_zero(self): chunk_size = 10 - stream = mock.Mock(spec=[u'write']) - download = _download.ChunkedDownload( - EXAMPLE_URL, chunk_size, stream) + stream = mock.Mock(spec=[u"write"]) + download = _download.ChunkedDownload(EXAMPLE_URL, chunk_size, stream) _fix_up_virtual(download) content_range = _download._ZERO_CONTENT_RANGE_HEADER - headers = {u'content-range': content_range} + headers = {u"content-range": content_range} status_code = http_client.REQUESTED_RANGE_NOT_SATISFIABLE - response = mock.Mock(headers=headers, - status_code=status_code, - spec=[u'headers', 'status_code']) + response = mock.Mock( + headers=headers, status_code=status_code, spec=[u"headers", "status_code"] + ) download._process_response(response) stream.write.assert_not_called() assert download.finished @@ -583,11 +600,10 @@ def test_consume_next_chunk(self): with pytest.raises(NotImplementedError) as exc_info: download.consume_next_chunk(None) - exc_info.match(u'virtual') + exc_info.match(u"virtual") class Test__add_bytes_range(object): - def test_do_nothing(self): headers = {} ret_val = _download.add_bytes_range(None, None, headers) @@ -598,39 +614,39 @@ def test_both_vals(self): headers = {} ret_val = _download.add_bytes_range(17, 1997, headers) assert ret_val is None - assert headers == {u'range': u'bytes=17-1997'} + assert headers == {u"range": u"bytes=17-1997"} def test_end_only(self): headers = {} ret_val = _download.add_bytes_range(None, 
909, headers) assert ret_val is None - assert headers == {u'range': u'bytes=0-909'} + assert headers == {u"range": u"bytes=0-909"} def test_start_only(self): headers = {} ret_val = _download.add_bytes_range(3735928559, None, headers) assert ret_val is None - assert headers == {u'range': u'bytes=3735928559-'} + assert headers == {u"range": u"bytes=3735928559-"} def test_start_as_offset(self): headers = {} ret_val = _download.add_bytes_range(-123454321, None, headers) assert ret_val is None - assert headers == {u'range': u'bytes=-123454321'} + assert headers == {u"range": u"bytes=-123454321"} class Test_get_range_info(object): - @staticmethod def _make_response(content_range): - headers = {u'content-range': content_range} - return mock.Mock(headers=headers, spec=['headers']) + headers = {u"content-range": content_range} + return mock.Mock(headers=headers, spec=["headers"]) def _success_helper(self, **kwargs): - content_range = u'Bytes 7-11/42' + content_range = u"Bytes 7-11/42" response = self._make_response(content_range) start_byte, end_byte, total_bytes = _download.get_range_info( - response, _get_headers, **kwargs) + response, _get_headers, **kwargs + ) assert start_byte == 7 assert end_byte == 11 assert total_bytes == 42 @@ -644,7 +660,7 @@ def test_success_with_callback(self): callback.assert_not_called() def _failure_helper(self, **kwargs): - content_range = u'nope x-6/y' + content_range = u"nope x-6/y" response = self._make_response(content_range) with pytest.raises(common.InvalidResponse) as exc_info: _download.get_range_info(response, _get_headers, **kwargs) @@ -663,14 +679,14 @@ def test_failure_with_callback(self): callback.assert_called_once_with() def _missing_header_helper(self, **kwargs): - response = mock.Mock(headers={}, spec=['headers']) + response = mock.Mock(headers={}, spec=["headers"]) with pytest.raises(common.InvalidResponse) as exc_info: _download.get_range_info(response, _get_headers, **kwargs) error = exc_info.value assert error.response 
is response assert len(error.args) == 2 - assert error.args[1] == u'content-range' + assert error.args[1] == u"content-range" def test_missing_header(self): self._missing_header_helper() @@ -682,34 +698,36 @@ def test_missing_header_with_callback(self): class Test__check_for_zero_content_range(object): - @staticmethod def _make_response(content_range, status_code): - headers = {u'content-range': content_range} - return mock.Mock(headers=headers, - status_code=status_code, - spec=[u'headers', 'status_code']) + headers = {u"content-range": content_range} + return mock.Mock( + headers=headers, status_code=status_code, spec=[u"headers", "status_code"] + ) def test_status_code_416_and_test_content_range_zero_both(self): content_range = _download._ZERO_CONTENT_RANGE_HEADER status_code = http_client.REQUESTED_RANGE_NOT_SATISFIABLE response = self._make_response(content_range, status_code) assert _download._check_for_zero_content_range( - response, _get_status_code, _get_headers) + response, _get_status_code, _get_headers + ) def test_status_code_416_only(self): - content_range = u'bytes 2-5/3' + content_range = u"bytes 2-5/3" status_code = http_client.REQUESTED_RANGE_NOT_SATISFIABLE response = self._make_response(content_range, status_code) assert not _download._check_for_zero_content_range( - response, _get_status_code, _get_headers) + response, _get_status_code, _get_headers + ) def test_content_range_zero_only(self): content_range = _download._ZERO_CONTENT_RANGE_HEADER status_code = http_client.OK response = self._make_response(content_range, status_code) assert not _download._check_for_zero_content_range( - response, _get_status_code, _get_headers) + response, _get_status_code, _get_headers + ) def _get_status_code(response): diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py index 19b14ef7..aaeeb5c3 100644 --- a/tests/unit/test__helpers.py +++ b/tests/unit/test__helpers.py @@ -26,14 +26,12 @@ def test_do_nothing(): class 
Test_header_required(object): - def _success_helper(self, **kwargs): - name = u'some-header' - value = u'The Right Hand Side' - headers = {name: value, u'other-name': u'other-value'} - response = mock.Mock(headers=headers, spec=[u'headers']) - result = _helpers.header_required( - response, name, _get_headers, **kwargs) + name = u"some-header" + value = u"The Right Hand Side" + headers = {name: value, u"other-name": u"other-value"} + response = mock.Mock(headers=headers, spec=[u"headers"]) + result = _helpers.header_required(response, name, _get_headers, **kwargs) assert result == value def test_success(self): @@ -45,8 +43,8 @@ def test_success_with_callback(self): callback.assert_not_called() def _failure_helper(self, **kwargs): - response = mock.Mock(headers={}, spec=[u'headers']) - name = u'any-name' + response = mock.Mock(headers={}, spec=[u"headers"]) + name = u"any-name" with pytest.raises(common.InvalidResponse) as exc_info: _helpers.header_required(response, name, _get_headers, **kwargs) @@ -65,7 +63,6 @@ def test_failure_with_callback(self): class Test_require_status_code(object): - @staticmethod def _get_status_code(response): return response.status_code @@ -81,7 +78,8 @@ def test_success(self): for value in acceptable: response = _make_response(value) status_code = _helpers.require_status_code( - response, status_codes, self._get_status_code) + response, status_codes, self._get_status_code + ) assert value == status_code def test_success_with_callback(self): @@ -89,7 +87,8 @@ def test_success_with_callback(self): response = _make_response(http_client.OK) callback = mock.Mock(spec=[]) status_code = _helpers.require_status_code( - response, status_codes, self._get_status_code, callback=callback) + response, status_codes, self._get_status_code, callback=callback + ) assert status_code == http_client.OK callback.assert_not_called() @@ -97,8 +96,7 @@ def test_failure(self): status_codes = (http_client.CREATED, http_client.NO_CONTENT) response = 
_make_response(http_client.OK) with pytest.raises(common.InvalidResponse) as exc_info: - _helpers.require_status_code( - response, status_codes, self._get_status_code) + _helpers.require_status_code(response, status_codes, self._get_status_code) error = exc_info.value assert error.response is response @@ -112,8 +110,8 @@ def test_failure_with_callback(self): callback = mock.Mock(spec=[]) with pytest.raises(common.InvalidResponse) as exc_info: _helpers.require_status_code( - response, status_codes, self._get_status_code, - callback=callback) + response, status_codes, self._get_status_code, callback=callback + ) error = exc_info.value assert error.response is response @@ -124,8 +122,7 @@ def test_failure_with_callback(self): class Test_calculate_retry_wait(object): - - @mock.patch(u'random.randint', return_value=125) + @mock.patch(u"random.randint", return_value=125) def test_past_limit(self, randint_mock): base_wait, wait_time = _helpers.calculate_retry_wait(70.0, 64.0) @@ -133,7 +130,7 @@ def test_past_limit(self, randint_mock): assert wait_time == 64.125 randint_mock.assert_called_once_with(0, 1000) - @mock.patch(u'random.randint', return_value=250) + @mock.patch(u"random.randint", return_value=250) def test_at_limit(self, randint_mock): base_wait, wait_time = _helpers.calculate_retry_wait(50.0, 50.0) @@ -141,7 +138,7 @@ def test_at_limit(self, randint_mock): assert wait_time == 50.25 randint_mock.assert_called_once_with(0, 1000) - @mock.patch(u'random.randint', return_value=875) + @mock.patch(u"random.randint", return_value=875) def test_under_limit(self, randint_mock): base_wait, wait_time = _helpers.calculate_retry_wait(16.0, 33.0) @@ -151,7 +148,6 @@ def test_under_limit(self, randint_mock): class Test_wait_and_retry(object): - def test_success_no_retry(self): truthy = http_client.OK assert truthy not in _helpers.RETRYABLE @@ -159,14 +155,13 @@ def test_success_no_retry(self): func = mock.Mock(return_value=response, spec=[]) retry_strategy = 
common.RetryStrategy() - ret_val = _helpers.wait_and_retry( - func, _get_status_code, retry_strategy) + ret_val = _helpers.wait_and_retry(func, _get_status_code, retry_strategy) assert ret_val is response func.assert_called_once_with() - @mock.patch(u'time.sleep') - @mock.patch(u'random.randint') + @mock.patch(u"time.sleep") + @mock.patch(u"random.randint") def test_success_with_retry(self, randint_mock, sleep_mock): randint_mock.side_effect = [125, 625, 375] @@ -176,13 +171,11 @@ def test_success_with_retry(self, randint_mock, sleep_mock): http_client.SERVICE_UNAVAILABLE, http_client.NOT_FOUND, ) - responses = [ - _make_response(status_code) for status_code in status_codes] + responses = [_make_response(status_code) for status_code in status_codes] func = mock.Mock(side_effect=responses, spec=[]) retry_strategy = common.RetryStrategy() - ret_val = _helpers.wait_and_retry( - func, _get_status_code, retry_strategy) + ret_val = _helpers.wait_and_retry(func, _get_status_code, retry_strategy) assert ret_val == responses[-1] assert status_codes[-1] not in _helpers.RETRYABLE @@ -198,8 +191,8 @@ def test_success_with_retry(self, randint_mock, sleep_mock): sleep_mock.assert_any_call(2.625) sleep_mock.assert_any_call(4.375) - @mock.patch(u'time.sleep') - @mock.patch(u'random.randint') + @mock.patch(u"time.sleep") + @mock.patch(u"random.randint") def test_retry_exceeds_max_cumulative(self, randint_mock, sleep_mock): randint_mock.side_effect = [875, 0, 375, 500, 500, 250, 125] @@ -213,13 +206,11 @@ def test_retry_exceeds_max_cumulative(self, randint_mock, sleep_mock): http_client.GATEWAY_TIMEOUT, common.TOO_MANY_REQUESTS, ) - responses = [ - _make_response(status_code) for status_code in status_codes] + responses = [_make_response(status_code) for status_code in status_codes] func = mock.Mock(side_effect=responses, spec=[]) retry_strategy = common.RetryStrategy(max_cumulative_retry=100.0) - ret_val = _helpers.wait_and_retry( - func, _get_status_code, retry_strategy) + ret_val 
= _helpers.wait_and_retry(func, _get_status_code, retry_strategy) assert ret_val == responses[-1] assert status_codes[-1] in _helpers.RETRYABLE @@ -241,7 +232,7 @@ def test_retry_exceeds_max_cumulative(self, randint_mock, sleep_mock): def _make_response(status_code): - return mock.Mock(status_code=status_code, spec=[u'status_code']) + return mock.Mock(status_code=status_code, spec=[u"status_code"]) def _get_status_code(response): diff --git a/tests/unit/test__upload.py b/tests/unit/test__upload.py index 443ce0c4..f9efee2f 100644 --- a/tests/unit/test__upload.py +++ b/tests/unit/test__upload.py @@ -25,22 +25,24 @@ SIMPLE_URL = ( - u'https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?' - u'uploadType=media&name={OBJECT}') + u"https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?" + u"uploadType=media&name={OBJECT}" +) MULTIPART_URL = ( - u'https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?' - u'uploadType=multipart') + u"https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?" + u"uploadType=multipart" +) RESUMABLE_URL = ( - u'https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?' - u'uploadType=resumable') + u"https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?" 
+ u"uploadType=resumable" +) ONE_MB = 1024 * 1024 -BASIC_CONTENT = u'text/plain' -JSON_TYPE = u'application/json; charset=UTF-8' -JSON_TYPE_LINE = b'content-type: application/json; charset=UTF-8\r\n' +BASIC_CONTENT = u"text/plain" +JSON_TYPE = u"application/json; charset=UTF-8" +JSON_TYPE_LINE = b"content-type: application/json; charset=UTF-8\r\n" class TestUploadBase(object): - def test_constructor_defaults(self): upload = _upload.UploadBase(SIMPLE_URL) assert upload.upload_url == SIMPLE_URL @@ -49,7 +51,7 @@ def test_constructor_defaults(self): _check_retry_strategy(upload) def test_constructor_explicit(self): - headers = {u'spin': u'doctors'} + headers = {u"spin": u"doctors"} upload = _upload.UploadBase(SIMPLE_URL, headers=headers) assert upload.upload_url == SIMPLE_URL assert upload._headers is headers @@ -104,64 +106,61 @@ def test__get_status_code(self): with pytest.raises(NotImplementedError) as exc_info: _upload.UploadBase._get_status_code(None) - exc_info.match(u'virtual') + exc_info.match(u"virtual") def test__get_headers(self): with pytest.raises(NotImplementedError) as exc_info: _upload.UploadBase._get_headers(None) - exc_info.match(u'virtual') + exc_info.match(u"virtual") def test__get_body(self): with pytest.raises(NotImplementedError) as exc_info: _upload.UploadBase._get_body(None) - exc_info.match(u'virtual') + exc_info.match(u"virtual") class TestSimpleUpload(object): - def test__prepare_request_already_finished(self): upload = _upload.SimpleUpload(SIMPLE_URL) upload._finished = True with pytest.raises(ValueError) as exc_info: - upload._prepare_request(b'', None) + upload._prepare_request(b"", None) - exc_info.match(u'An upload can only be used once.') + exc_info.match(u"An upload can only be used once.") def test__prepare_request_non_bytes_data(self): upload = _upload.SimpleUpload(SIMPLE_URL) assert not upload.finished with pytest.raises(TypeError) as exc_info: - upload._prepare_request(u'', None) + upload._prepare_request(u"", None) - 
exc_info.match(u'must be bytes') + exc_info.match(u"must be bytes") def test__prepare_request(self): upload = _upload.SimpleUpload(SIMPLE_URL) - content_type = u'image/jpeg' - data = b'cheetos and eetos' - method, url, payload, headers = upload._prepare_request( - data, content_type) + content_type = u"image/jpeg" + data = b"cheetos and eetos" + method, url, payload, headers = upload._prepare_request(data, content_type) - assert method == u'POST' + assert method == u"POST" assert url == SIMPLE_URL assert payload == data - assert headers == {u'content-type': content_type} + assert headers == {u"content-type": content_type} def test__prepare_request_with_headers(self): - headers = {u'x-goog-cheetos': u'spicy'} + headers = {u"x-goog-cheetos": u"spicy"} upload = _upload.SimpleUpload(SIMPLE_URL, headers=headers) - content_type = u'image/jpeg' - data = b'some stuff' - method, url, payload, new_headers = upload._prepare_request( - data, content_type) + content_type = u"image/jpeg" + data = b"some stuff" + method, url, payload, new_headers = upload._prepare_request(data, content_type) - assert method == u'POST' + assert method == u"POST" assert url == SIMPLE_URL assert payload == data assert new_headers is headers - expected = {u'content-type': content_type, u'x-goog-cheetos': u'spicy'} + expected = {u"content-type": content_type, u"x-goog-cheetos": u"spicy"} assert headers == expected def test_transmit(self): @@ -169,45 +168,43 @@ def test_transmit(self): with pytest.raises(NotImplementedError) as exc_info: upload.transmit(None, None, None) - exc_info.match(u'virtual') + exc_info.match(u"virtual") class TestMultipartUpload(object): - def test__prepare_request_already_finished(self): upload = _upload.MultipartUpload(MULTIPART_URL) upload._finished = True with pytest.raises(ValueError): - upload._prepare_request(b'Hi', {}, BASIC_CONTENT) + upload._prepare_request(b"Hi", {}, BASIC_CONTENT) def test__prepare_request_non_bytes_data(self): - data = u'Nope not bytes.' 
+ data = u"Nope not bytes." upload = _upload.MultipartUpload(MULTIPART_URL) with pytest.raises(TypeError): upload._prepare_request(data, {}, BASIC_CONTENT) - @mock.patch(u'google.resumable_media._upload.get_boundary', - return_value=b'==3==') + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==3==") def _prepare_request_helper(self, mock_get_boundary, headers=None): upload = _upload.MultipartUpload(MULTIPART_URL, headers=headers) - data = b'Hi' - metadata = {u'Some': u'Stuff'} + data = b"Hi" + metadata = {u"Some": u"Stuff"} content_type = BASIC_CONTENT method, url, payload, new_headers = upload._prepare_request( - data, metadata, content_type) + data, metadata, content_type + ) - assert method == u'POST' + assert method == u"POST" assert url == MULTIPART_URL expected_payload = ( - b'--==3==\r\n' + - JSON_TYPE_LINE + - b'\r\n' + b"--==3==\r\n" + JSON_TYPE_LINE + b"\r\n" b'{"Some": "Stuff"}\r\n' - b'--==3==\r\n' - b'content-type: text/plain\r\n' - b'\r\n' - b'Hi\r\n' - b'--==3==--') + b"--==3==\r\n" + b"content-type: text/plain\r\n" + b"\r\n" + b"Hi\r\n" + b"--==3==--" + ) assert payload == expected_payload multipart_type = b'multipart/related; boundary="==3=="' mock_get_boundary.assert_called_once_with() @@ -216,17 +213,16 @@ def _prepare_request_helper(self, mock_get_boundary, headers=None): def test__prepare_request(self): headers, multipart_type = self._prepare_request_helper() - assert headers == {u'content-type': multipart_type} + assert headers == {u"content-type": multipart_type} def test__prepare_request_with_headers(self): - headers = {u'best': u'shirt', u'worst': u'hat'} - new_headers, multipart_type = self._prepare_request_helper( - headers=headers) + headers = {u"best": u"shirt", u"worst": u"hat"} + new_headers, multipart_type = self._prepare_request_helper(headers=headers) assert new_headers is headers expected_headers = { - u'best': u'shirt', - u'content-type': multipart_type, - u'worst': u'hat', + u"best": u"shirt", + 
u"content-type": multipart_type, + u"worst": u"hat", } assert expected_headers == headers @@ -235,11 +231,10 @@ def test_transmit(self): with pytest.raises(NotImplementedError) as exc_info: upload.transmit(None, None, None, None) - exc_info.match(u'virtual') + exc_info.match(u"virtual") class TestResumableUpload(object): - def test_constructor(self): chunk_size = ONE_MB upload = _upload.ResumableUpload(RESUMABLE_URL, chunk_size) @@ -291,7 +286,7 @@ def test_resumable_url_property(self): assert upload.resumable_url is None # Make sure we cannot set it on public @property. - new_url = u'http://test.invalid?upload_id=not-none' + new_url = u"http://test.invalid?upload_id=not-none" with pytest.raises(AttributeError): upload.resumable_url = new_url @@ -325,14 +320,12 @@ def test_total_bytes_property(self): upload._total_bytes = 8192 assert upload.total_bytes == 8192 - def _prepare_initiate_request_helper(self, upload_headers=None, - **method_kwargs): - data = b'some really big big data.' + def _prepare_initiate_request_helper(self, upload_headers=None, **method_kwargs): + data = b"some really big big data." stream = io.BytesIO(data) - metadata = {u'name': u'big-data-file.txt'} + metadata = {u"name": u"big-data-file.txt"} - upload = _upload.ResumableUpload( - RESUMABLE_URL, ONE_MB, headers=upload_headers) + upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB, headers=upload_headers) orig_headers = upload._headers.copy() # Check ``upload``-s state before. assert upload._stream is None @@ -340,19 +333,20 @@ def _prepare_initiate_request_helper(self, upload_headers=None, assert upload._total_bytes is None # Call the method and check the output. method, url, payload, headers = upload._prepare_initiate_request( - stream, metadata, BASIC_CONTENT, **method_kwargs) + stream, metadata, BASIC_CONTENT, **method_kwargs + ) assert payload == b'{"name": "big-data-file.txt"}' # Make sure the ``upload``-s state was updated. 
assert upload._stream == stream assert upload._content_type == BASIC_CONTENT - if method_kwargs == {u'stream_final': False}: + if method_kwargs == {u"stream_final": False}: assert upload._total_bytes is None else: assert upload._total_bytes == len(data) # Make sure headers are untouched. assert headers is not upload._headers assert upload._headers == orig_headers - assert method == u'POST' + assert method == u"POST" assert url == upload.upload_url # Make sure the stream is still at the beginning. assert stream.tell() == 0 @@ -362,51 +356,49 @@ def _prepare_initiate_request_helper(self, upload_headers=None, def test__prepare_initiate_request(self): data, headers = self._prepare_initiate_request_helper() expected_headers = { - u'content-type': JSON_TYPE, - u'x-upload-content-length': u'{:d}'.format(len(data)), - u'x-upload-content-type': BASIC_CONTENT, + u"content-type": JSON_TYPE, + u"x-upload-content-length": u"{:d}".format(len(data)), + u"x-upload-content-type": BASIC_CONTENT, } assert headers == expected_headers def test__prepare_initiate_request_with_headers(self): - headers = {u'caviar': u'beluga', u'top': u'quark'} + headers = {u"caviar": u"beluga", u"top": u"quark"} data, new_headers = self._prepare_initiate_request_helper( - upload_headers=headers) + upload_headers=headers + ) expected_headers = { - u'caviar': u'beluga', - u'content-type': JSON_TYPE, - u'top': u'quark', - u'x-upload-content-length': u'{:d}'.format(len(data)), - u'x-upload-content-type': BASIC_CONTENT, + u"caviar": u"beluga", + u"content-type": JSON_TYPE, + u"top": u"quark", + u"x-upload-content-length": u"{:d}".format(len(data)), + u"x-upload-content-type": BASIC_CONTENT, } assert new_headers == expected_headers def test__prepare_initiate_request_known_size(self): total_bytes = 25 - data, headers = self._prepare_initiate_request_helper( - total_bytes=total_bytes) + data, headers = self._prepare_initiate_request_helper(total_bytes=total_bytes) assert len(data) == total_bytes expected_headers 
= { - u'content-type': u'application/json; charset=UTF-8', - u'x-upload-content-length': u'{:d}'.format(total_bytes), - u'x-upload-content-type': BASIC_CONTENT, + u"content-type": u"application/json; charset=UTF-8", + u"x-upload-content-length": u"{:d}".format(total_bytes), + u"x-upload-content-type": BASIC_CONTENT, } assert headers == expected_headers def test__prepare_initiate_request_unknown_size(self): - _, headers = self._prepare_initiate_request_helper( - stream_final=False) + _, headers = self._prepare_initiate_request_helper(stream_final=False) expected_headers = { - u'content-type': u'application/json; charset=UTF-8', - u'x-upload-content-type': BASIC_CONTENT, + u"content-type": u"application/json; charset=UTF-8", + u"x-upload-content-type": BASIC_CONTENT, } assert headers == expected_headers def test__prepare_initiate_request_already_initiated(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) # Fake that the upload has been started. - upload._resumable_url = ( - u'http://test.invalid?upload_id=definitely-started') + upload._resumable_url = u"http://test.invalid?upload_id=definitely-started" with pytest.raises(ValueError): upload._prepare_initiate_request(io.BytesIO(), {}, BASIC_CONTENT) @@ -414,7 +406,7 @@ def test__prepare_initiate_request_already_initiated(self): def test__prepare_initiate_request_bad_stream_position(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) - stream = io.BytesIO(b'data') + stream = io.BytesIO(b"data") stream.seek(1) with pytest.raises(ValueError): upload._prepare_initiate_request(stream, {}, BASIC_CONTENT) @@ -441,7 +433,7 @@ def test__process_initiate_response(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) _fix_up_virtual(upload) - headers = {u'location': u'http://test.invalid?upload_id=kmfeij3234'} + headers = {u"location": u"http://test.invalid?upload_id=kmfeij3234"} response = _make_response(headers=headers) # Check resumable_url before. 
assert upload._resumable_url is None @@ -449,14 +441,14 @@ def test__process_initiate_response(self): ret_val = upload._process_initiate_response(response) assert ret_val is None # Check resumable_url after. - assert upload._resumable_url == headers[u'location'] + assert upload._resumable_url == headers[u"location"] def test_initiate(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) with pytest.raises(NotImplementedError) as exc_info: upload.initiate(None, None, {}, BASIC_CONTENT) - exc_info.match(u'virtual') + exc_info.match(u"virtual") def test__prepare_request_already_finished(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) @@ -465,7 +457,7 @@ def test__prepare_request_already_finished(self): with pytest.raises(ValueError) as exc_info: upload._prepare_request() - assert exc_info.value.args == (u'Upload has finished.',) + assert exc_info.value.args == (u"Upload has finished.",) def test__prepare_request_invalid(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) @@ -474,8 +466,8 @@ def test__prepare_request_invalid(self): with pytest.raises(ValueError) as exc_info: upload._prepare_request() - assert exc_info.match(u'invalid state') - assert exc_info.match(u'recover()') + assert exc_info.match(u"invalid state") + assert exc_info.match(u"recover()") def test__prepare_request_not_initiated(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) @@ -485,38 +477,37 @@ def test__prepare_request_not_initiated(self): with pytest.raises(ValueError) as exc_info: upload._prepare_request() - assert exc_info.match(u'upload has not been initiated') - assert exc_info.match(u'initiate()') + assert exc_info.match(u"upload has not been initiated") + assert exc_info.match(u"initiate()") def test__prepare_request_invalid_stream_state(self): - stream = io.BytesIO(b'some data here') + stream = io.BytesIO(b"some data here") upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) upload._stream = stream - upload._resumable_url = 
u'http://test.invalid?upload_id=not-none' + upload._resumable_url = u"http://test.invalid?upload_id=not-none" # Make stream.tell() disagree with bytes_uploaded. upload._bytes_uploaded = 5 assert upload.bytes_uploaded != stream.tell() with pytest.raises(ValueError) as exc_info: upload._prepare_request() - assert exc_info.match(u'Bytes stream is in unexpected state.') + assert exc_info.match(u"Bytes stream is in unexpected state.") @staticmethod def _upload_in_flight(data, headers=None): - upload = _upload.ResumableUpload( - RESUMABLE_URL, ONE_MB, headers=headers) + upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB, headers=headers) upload._stream = io.BytesIO(data) upload._content_type = BASIC_CONTENT upload._total_bytes = len(data) - upload._resumable_url = u'http://test.invalid?upload_id=not-none' + upload._resumable_url = u"http://test.invalid?upload_id=not-none" return upload def _prepare_request_helper(self, headers=None): - data = b'All of the data goes in a stream.' + data = b"All of the data goes in a stream." upload = self._upload_in_flight(data, headers=headers) method, url, payload, new_headers = upload._prepare_request() # Check the response values. 
- assert method == u'PUT' + assert method == u"PUT" assert url == upload.resumable_url assert payload == data # Make sure headers are **NOT** updated @@ -527,22 +518,22 @@ def _prepare_request_helper(self, headers=None): def test__prepare_request_success(self): headers = self._prepare_request_helper() expected_headers = { - u'content-range': u'bytes 0-32/33', - u'content-type': BASIC_CONTENT, + u"content-range": u"bytes 0-32/33", + u"content-type": BASIC_CONTENT, } assert headers == expected_headers def test__prepare_request_success_with_headers(self): - headers = {u'cannot': u'touch this'} + headers = {u"cannot": u"touch this"} new_headers = self._prepare_request_helper(headers) assert new_headers is not headers expected_headers = { - u'content-range': u'bytes 0-32/33', - u'content-type': BASIC_CONTENT, + u"content-range": u"bytes 0-32/33", + u"content-type": BASIC_CONTENT, } assert new_headers == expected_headers # Make sure the ``_headers`` are not incorporated. - assert u'cannot' not in new_headers + assert u"cannot" not in new_headers def test__make_invalid(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) @@ -582,10 +573,12 @@ def test__process_response_success(self): bytes_sent = 158 total_bytes = upload._bytes_uploaded + bytes_sent response_body = u'{{"size": "{:d}"}}'.format(total_bytes) - response_body = response_body.encode(u'utf-8') + response_body = response_body.encode(u"utf-8") response = mock.Mock( - content=response_body, status_code=http_client.OK, - spec=[u'content', u'status_code']) + content=response_body, + status_code=http_client.OK, + spec=[u"content", u"status_code"], + ) ret_val = upload._process_response(response, bytes_sent) assert ret_val is None # Check status after. 
@@ -596,8 +589,7 @@ def test__process_response_partial_no_range(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) _fix_up_virtual(upload) - response = _make_response( - status_code=resumable_media.PERMANENT_REDIRECT) + response = _make_response(status_code=resumable_media.PERMANENT_REDIRECT) # Make sure the upload is valid before the failure. assert not upload.invalid with pytest.raises(common.InvalidResponse) as exc_info: @@ -609,7 +601,7 @@ def test__process_response_partial_no_range(self): error = exc_info.value assert error.response is response assert len(error.args) == 2 - assert error.args[1] == u'range' + assert error.args[1] == u"range" def test__process_response_partial_bad_range(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) @@ -617,9 +609,10 @@ def test__process_response_partial_bad_range(self): # Make sure the upload is valid before the failure. assert not upload.invalid - headers = {u'range': u'nights 1-81'} + headers = {u"range": u"nights 1-81"} response = _make_response( - status_code=resumable_media.PERMANENT_REDIRECT, headers=headers) + status_code=resumable_media.PERMANENT_REDIRECT, headers=headers + ) with pytest.raises(common.InvalidResponse) as exc_info: upload._process_response(response, 81) @@ -627,7 +620,7 @@ def test__process_response_partial_bad_range(self): error = exc_info.value assert error.response is response assert len(error.args) == 3 - assert error.args[1] == headers[u'range'] + assert error.args[1] == headers[u"range"] # Make sure the upload is invalid after the failure. assert upload.invalid @@ -637,9 +630,10 @@ def test__process_response_partial(self): # Check status before. 
assert upload._bytes_uploaded == 0 - headers = {u'range': u'bytes=0-171'} + headers = {u"range": u"bytes=0-171"} response = _make_response( - status_code=resumable_media.PERMANENT_REDIRECT, headers=headers) + status_code=resumable_media.PERMANENT_REDIRECT, headers=headers + ) ret_val = upload._process_response(response, 172) assert ret_val is None # Check status after. @@ -650,7 +644,7 @@ def test_transmit_next_chunk(self): with pytest.raises(NotImplementedError) as exc_info: upload.transmit_next_chunk(None) - exc_info.match(u'virtual') + exc_info.match(u"virtual") def test__prepare_recover_request_not_invalid(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) @@ -664,28 +658,27 @@ def test__prepare_recover_request(self): upload._invalid = True method, url, payload, headers = upload._prepare_recover_request() - assert method == u'PUT' + assert method == u"PUT" assert url == upload.resumable_url assert payload is None - assert headers == {u'content-range': u'bytes */*'} + assert headers == {u"content-range": u"bytes */*"} # Make sure headers are untouched. assert upload._headers == {} def test__prepare_recover_request_with_headers(self): - headers = {u'lake': u'ocean'} - upload = _upload.ResumableUpload( - RESUMABLE_URL, ONE_MB, headers=headers) + headers = {u"lake": u"ocean"} + upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB, headers=headers) upload._invalid = True method, url, payload, new_headers = upload._prepare_recover_request() - assert method == u'PUT' + assert method == u"PUT" assert url == upload.resumable_url assert payload is None - assert new_headers == {u'content-range': u'bytes */*'} + assert new_headers == {u"content-range": u"bytes */*"} # Make sure the ``_headers`` are not incorporated. - assert u'lake' not in new_headers + assert u"lake" not in new_headers # Make sure headers are untouched. 
- assert upload._headers == {u'lake': u'ocean'} + assert upload._headers == {u"lake": u"ocean"} def test__process_recover_response_bad_status(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) @@ -710,12 +703,11 @@ def test__process_recover_response_no_range(self): _fix_up_virtual(upload) upload._invalid = True - upload._stream = mock.Mock(spec=[u'seek']) + upload._stream = mock.Mock(spec=[u"seek"]) upload._bytes_uploaded = mock.sentinel.not_zero assert upload.bytes_uploaded != 0 - response = _make_response( - status_code=resumable_media.PERMANENT_REDIRECT) + response = _make_response(status_code=resumable_media.PERMANENT_REDIRECT) ret_val = upload._process_recover_response(response) assert ret_val is None # Check the state of ``upload`` after. @@ -728,19 +720,20 @@ def test__process_recover_response_bad_range(self): _fix_up_virtual(upload) upload._invalid = True - upload._stream = mock.Mock(spec=[u'seek']) + upload._stream = mock.Mock(spec=[u"seek"]) upload._bytes_uploaded = mock.sentinel.not_zero - headers = {u'range': u'bites=9-11'} + headers = {u"range": u"bites=9-11"} response = _make_response( - status_code=resumable_media.PERMANENT_REDIRECT, headers=headers) + status_code=resumable_media.PERMANENT_REDIRECT, headers=headers + ) with pytest.raises(common.InvalidResponse) as exc_info: upload._process_recover_response(response) error = exc_info.value assert error.response is response assert len(error.args) == 3 - assert error.args[1] == headers[u'range'] + assert error.args[1] == headers[u"range"] # Check the state of ``upload`` after (untouched). 
assert upload.bytes_uploaded is mock.sentinel.not_zero assert upload.invalid @@ -751,14 +744,15 @@ def test__process_recover_response_with_range(self): _fix_up_virtual(upload) upload._invalid = True - upload._stream = mock.Mock(spec=[u'seek']) + upload._stream = mock.Mock(spec=[u"seek"]) upload._bytes_uploaded = mock.sentinel.not_zero assert upload.bytes_uploaded != 0 end = 11 - headers = {u'range': u'bytes=0-{:d}'.format(end)} + headers = {u"range": u"bytes=0-{:d}".format(end)} response = _make_response( - status_code=resumable_media.PERMANENT_REDIRECT, headers=headers) + status_code=resumable_media.PERMANENT_REDIRECT, headers=headers + ) ret_val = upload._process_recover_response(response) assert ret_val is None # Check the state of ``upload`` after. @@ -771,69 +765,66 @@ def test_recover(self): with pytest.raises(NotImplementedError) as exc_info: upload.recover(None) - exc_info.match(u'virtual') + exc_info.match(u"virtual") -@mock.patch(u'random.randrange', return_value=1234567890123456789) +@mock.patch(u"random.randrange", return_value=1234567890123456789) def test_get_boundary(mock_rand): result = _upload.get_boundary() - assert result == b'===============1234567890123456789==' + assert result == b"===============1234567890123456789==" mock_rand.assert_called_once_with(sys.maxsize) class Test_construct_multipart_request(object): - - @mock.patch(u'google.resumable_media._upload.get_boundary', - return_value=b'==1==') + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==1==") def test_binary(self, mock_get_boundary): - data = b'By nary day tuh' - metadata = {u'name': u'hi-file.bin'} - content_type = u'application/octet-stream' + data = b"By nary day tuh" + metadata = {u"name": u"hi-file.bin"} + content_type = u"application/octet-stream" payload, multipart_boundary = _upload.construct_multipart_request( - data, metadata, content_type) + data, metadata, content_type + ) assert multipart_boundary == mock_get_boundary.return_value 
expected_payload = ( - b'--==1==\r\n' + - JSON_TYPE_LINE + - b'\r\n' + b"--==1==\r\n" + JSON_TYPE_LINE + b"\r\n" b'{"name": "hi-file.bin"}\r\n' - b'--==1==\r\n' - b'content-type: application/octet-stream\r\n' - b'\r\n' - b'By nary day tuh\r\n' - b'--==1==--') + b"--==1==\r\n" + b"content-type: application/octet-stream\r\n" + b"\r\n" + b"By nary day tuh\r\n" + b"--==1==--" + ) assert payload == expected_payload mock_get_boundary.assert_called_once_with() - @mock.patch(u'google.resumable_media._upload.get_boundary', - return_value=b'==2==') + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==2==") def test_unicode(self, mock_get_boundary): - data_unicode = u'\N{snowman}' + data_unicode = u"\N{snowman}" # construct_multipart_request( ASSUMES callers pass bytes. - data = data_unicode.encode(u'utf-8') - metadata = {u'name': u'snowman.txt'} + data = data_unicode.encode(u"utf-8") + metadata = {u"name": u"snowman.txt"} content_type = BASIC_CONTENT payload, multipart_boundary = _upload.construct_multipart_request( - data, metadata, content_type) + data, metadata, content_type + ) assert multipart_boundary == mock_get_boundary.return_value expected_payload = ( - b'--==2==\r\n' + - JSON_TYPE_LINE + - b'\r\n' + b"--==2==\r\n" + JSON_TYPE_LINE + b"\r\n" b'{"name": "snowman.txt"}\r\n' - b'--==2==\r\n' - b'content-type: text/plain\r\n' - b'\r\n' - b'\xe2\x98\x83\r\n' - b'--==2==--') + b"--==2==\r\n" + b"content-type: text/plain\r\n" + b"\r\n" + b"\xe2\x98\x83\r\n" + b"--==2==--" + ) assert payload == expected_payload mock_get_boundary.assert_called_once_with() def test_get_total_bytes(): - data = b'some data' + data = b"some data" stream = io.BytesIO(data) # Check position before function call. 
assert stream.tell() == 0 @@ -850,32 +841,29 @@ def test_get_total_bytes(): class Test_get_next_chunk(object): - def test_exhausted_known_size(self): - data = b'the end' + data = b"the end" stream = io.BytesIO(data) stream.seek(len(data)) with pytest.raises(ValueError) as exc_info: _upload.get_next_chunk(stream, 1, len(data)) - exc_info.match( - u'Stream is already exhausted. There is no content remaining.') + exc_info.match(u"Stream is already exhausted. There is no content remaining.") def test_exhausted_known_size_zero(self): - stream = io.BytesIO(b'') + stream = io.BytesIO(b"") answer = _upload.get_next_chunk(stream, 1, 0) - assert answer == (0, b'', 'bytes */0') + assert answer == (0, b"", "bytes */0") def test_exhausted_known_size_zero_nonempty(self): - stream = io.BytesIO(b'not empty WAT!') + stream = io.BytesIO(b"not empty WAT!") with pytest.raises(ValueError) as exc_info: _upload.get_next_chunk(stream, 1, 0) - exc_info.match( - u'Stream specified as empty, but produced non-empty content.') + exc_info.match(u"Stream specified as empty, but produced non-empty content.") def test_read_past_known_size(self): - data = b'more content than we expected' + data = b"more content than we expected" stream = io.BytesIO(data) chunk_size = len(data) total_bytes = chunk_size - 3 @@ -883,11 +871,11 @@ def test_read_past_known_size(self): with pytest.raises(ValueError) as exc_info: _upload.get_next_chunk(stream, chunk_size, total_bytes) - exc_info.match(u'bytes have been read from the stream') - exc_info.match(u'exceeds the expected total') + exc_info.match(u"bytes have been read from the stream") + exc_info.match(u"exceeds the expected total") def test_success_known_size(self): - data = b'0123456789' + data = b"0123456789" stream = io.BytesIO(data) total_bytes = len(data) chunk_size = 3 @@ -896,21 +884,21 @@ def test_success_known_size(self): result1 = _upload.get_next_chunk(stream, chunk_size, total_bytes) result2 = _upload.get_next_chunk(stream, chunk_size, total_bytes) 
result3 = _upload.get_next_chunk(stream, chunk_size, total_bytes) - assert result0 == (0, b'012', u'bytes 0-2/10') - assert result1 == (3, b'345', u'bytes 3-5/10') - assert result2 == (6, b'678', u'bytes 6-8/10') - assert result3 == (9, b'9', u'bytes 9-9/10') + assert result0 == (0, b"012", u"bytes 0-2/10") + assert result1 == (3, b"345", u"bytes 3-5/10") + assert result2 == (6, b"678", u"bytes 6-8/10") + assert result3 == (9, b"9", u"bytes 9-9/10") assert stream.tell() == total_bytes def test_success_unknown_size(self): - data = b'abcdefghij' + data = b"abcdefghij" stream = io.BytesIO(data) chunk_size = 6 # Splits into 4 chunks: abcdef, ghij result0 = _upload.get_next_chunk(stream, chunk_size, None) result1 = _upload.get_next_chunk(stream, chunk_size, None) - assert result0 == (0, b'abcdef', u'bytes 0-5/*') - assert result1 == (chunk_size, b'ghij', u'bytes 6-9/10') + assert result0 == (0, b"abcdef", u"bytes 0-5/*") + assert result1 == (chunk_size, b"ghij", u"bytes 6-9/10") assert stream.tell() == len(data) # Do the same when the chunk size evenly divides len(data) @@ -919,27 +907,26 @@ def test_success_unknown_size(self): # Splits into 2 chunks: `data` and empty string result0 = _upload.get_next_chunk(stream, chunk_size, None) result1 = _upload.get_next_chunk(stream, chunk_size, None) - assert result0 == (0, data, u'bytes 0-9/*') - assert result1 == (len(data), b'', u'bytes */10') + assert result0 == (0, data, u"bytes 0-9/*") + assert result1 == (len(data), b"", u"bytes */10") assert stream.tell() == len(data) class Test_get_content_range(object): - def test_known_size(self): result = _upload.get_content_range(5, 10, 40) - assert result == u'bytes 5-10/40' + assert result == u"bytes 5-10/40" def test_unknown_size(self): result = _upload.get_content_range(1000, 10000, None) - assert result == u'bytes 1000-10000/*' + assert result == u"bytes 1000-10000/*" def _make_response(status_code=http_client.OK, headers=None): headers = headers or {} return mock.Mock( - 
headers=headers, status_code=status_code, - spec=[u'headers', u'status_code']) + headers=headers, status_code=status_code, spec=[u"headers", u"status_code"] + ) def _get_status_code(response): diff --git a/tests/unit/test_common.py b/tests/unit/test_common.py index 1c4a15ad..c48ba94e 100644 --- a/tests/unit/test_common.py +++ b/tests/unit/test_common.py @@ -19,23 +19,19 @@ class TestInvalidResponse(object): - def test_constructor(self): response = mock.sentinel.response - error = common.InvalidResponse( - response, 1, u'a', [b'm'], True) + error = common.InvalidResponse(response, 1, u"a", [b"m"], True) assert error.response is response - assert error.args == (1, u'a', [b'm'], True) + assert error.args == (1, u"a", [b"m"], True) class TestRetryStrategy(object): - def test_constructor_defaults(self): retry_strategy = common.RetryStrategy() assert retry_strategy.max_sleep == common.MAX_SLEEP - assert ( - retry_strategy.max_cumulative_retry == common.MAX_CUMULATIVE_RETRY) + assert retry_strategy.max_cumulative_retry == common.MAX_CUMULATIVE_RETRY assert retry_strategy.max_retries is None def test_constructor_failure(self): @@ -48,7 +44,8 @@ def test_constructor_explicit_bound_cumulative(self): max_sleep = 10.0 max_cumulative_retry = 100.0 retry_strategy = common.RetryStrategy( - max_sleep=max_sleep, max_cumulative_retry=max_cumulative_retry) + max_sleep=max_sleep, max_cumulative_retry=max_cumulative_retry + ) assert retry_strategy.max_sleep == max_sleep assert retry_strategy.max_cumulative_retry == max_cumulative_retry @@ -58,7 +55,8 @@ def test_constructor_explicit_bound_retries(self): max_sleep = 13.75 max_retries = 14 retry_strategy = common.RetryStrategy( - max_sleep=max_sleep, max_retries=max_retries) + max_sleep=max_sleep, max_retries=max_retries + ) assert retry_strategy.max_sleep == max_sleep assert retry_strategy.max_cumulative_retry is None