From 69f6de5a08ffa3ca18f7cd9e8b9edd59f25dad00 Mon Sep 17 00:00:00 2001
From: annatisch
Date: Wed, 21 Aug 2019 06:41:46 +1200
Subject: [PATCH] [storage] Fix for Files upload return type (#6772)

* Fix missing import

* Fix for Files inconsistent return types

* Blob async trace decorators
---
 .../azure/storage/blob/_shared/uploads.py     |  4 +-
 .../storage/blob/_shared/uploads_async.py     |  4 +-
 .../storage/blob/aio/blob_client_async.py     | 28 +++++++++++
 .../blob/aio/blob_service_client_async.py     | 10 ++++
 .../blob/aio/container_client_async.py        | 15 ++++++
 .../azure/storage/blob/aio/lease_async.py     |  7 +++
 .../azure/storage/blob/container_client.py    |  1 -
 .../tests/test_container.py                   |  1 +
 .../azure/storage/file/_shared/uploads.py     |  4 +-
 .../storage/file/_shared/uploads_async.py     |  4 +-
 .../storage/file/aio/file_client_async.py     |  3 +-
 .../azure/storage/file/file_client.py         |  6 ++-
 .../azure-storage-file/tests/test_file.py     | 48 ++++++++++++++-----
 .../tests/test_file_async.py                  | 43 ++++++++++++-----
 .../azure/storage/queue/_shared/uploads.py    |  4 +-
 .../storage/queue/_shared/uploads_async.py    |  4 +-
 16 files changed, 147 insertions(+), 39 deletions(-)

diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/uploads.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/uploads.py
index 50b314361540..5060761d27fc 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/uploads.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/uploads.py
@@ -340,7 +340,7 @@ class FileChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method

     def _upload_chunk(self, chunk_offset, chunk_data):
         chunk_end = chunk_offset + len(chunk_data) - 1
-        self.service.upload_range(
+        response = self.service.upload_range(
             chunk_data,
             chunk_offset,
             chunk_end,
@@ -348,7 +348,7 @@ def _upload_chunk(self, chunk_offset, chunk_data):
             upload_stream_current=self.progress_total,
             **self.request_options
         )
-        return 'bytes={0}-{1}'.format(chunk_offset, chunk_end)
+        return 'bytes={0}-{1}'.format(chunk_offset, chunk_end), response


 class SubStream(IOBase):
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/uploads_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/uploads_async.py
index 861422855b8d..8aeafa39ef28 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/uploads_async.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/uploads_async.py
@@ -338,7 +338,7 @@ class FileChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method

     async def _upload_chunk(self, chunk_offset, chunk_data):
         chunk_end = chunk_offset + len(chunk_data) - 1
-        await self.service.upload_range(
+        response = await self.service.upload_range(
            chunk_data,
            chunk_offset,
            chunk_end,
@@ -347,4 +347,4 @@ async def _upload_chunk(self, chunk_offset, chunk_data):
            upload_stream_current=self.progress_total,
            **self.request_options
        )
        range_id = 'bytes={0}-{1}'.format(chunk_offset, chunk_end)
-       return range_id
+       return range_id, response
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/blob_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/blob_client_async.py
index f6597df7d76c..5703f6a7fcf3 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/blob_client_async.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/blob_client_async.py
@@ -10,6 +10,8 @@
     TYPE_CHECKING
 )

+from azure.core.tracing.decorator_async import distributed_trace_async
+
 from .._shared.base_client_async import AsyncStorageAccountHostsMixin
 from .._shared.policies_async import
ExponentialRetry from .._shared.downloads_async import StorageStreamDownloader @@ -114,6 +116,7 @@ def __init__( self._client = AzureBlobStorage(url=self.url, pipeline=self._pipeline, loop=loop) self._loop = loop + @distributed_trace_async async def get_account_information(self, **kwargs): # type: ignore # type: (Optional[int]) -> Dict[str, str] """Gets information related to the storage account in which the blob resides. @@ -129,6 +132,7 @@ async def get_account_information(self, **kwargs): # type: ignore except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def upload_blob( self, data, # type: Union[Iterable[AnyStr], IO[AnyStr]] blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] @@ -240,6 +244,7 @@ async def upload_blob( return await upload_page_blob(**options) return await upload_append_blob(**options) + @distributed_trace_async async def download_blob(self, offset=None, length=None, validate_content=False, **kwargs): # type: (Optional[int], Optional[int], bool, Any) -> Iterable[bytes] """Downloads a blob to a stream with automatic chunking. @@ -312,6 +317,7 @@ async def download_blob(self, offset=None, length=None, validate_content=False, await downloader.setup(extra_properties=extra_properties) return downloader + @distributed_trace_async async def delete_blob(self, delete_snapshots=False, **kwargs): # type: (bool, Any) -> None """Marks the specified blob for deletion. @@ -374,6 +380,7 @@ async def delete_blob(self, delete_snapshots=False, **kwargs): except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def undelete_blob(self, **kwargs): # type: (Any) -> None """Restores soft-deleted blobs or snapshots. @@ -398,6 +405,7 @@ async def undelete_blob(self, **kwargs): except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def get_blob_properties(self, **kwargs): # type: (Any) -> BlobProperties """Returns all user-defined metadata, standard HTTP properties, and @@ -461,6 +469,7 @@ async def get_blob_properties(self, **kwargs): blob_props.container = self.container_name return blob_props # type: ignore + @distributed_trace_async async def set_http_headers(self, content_settings=None, **kwargs): # type: (Optional[ContentSettings], Any) -> None """Sets system properties on the blob. @@ -505,6 +514,7 @@ async def set_http_headers(self, content_settings=None, **kwargs): except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def set_blob_metadata(self, metadata=None, **kwargs): # type: (Optional[Dict[str, str]], Any) -> Dict[str, Union[str, datetime]] """Sets user-defined metadata for the blob as one or more name-value pairs. @@ -549,6 +559,7 @@ async def set_blob_metadata(self, metadata=None, **kwargs): except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def create_page_blob( # type: ignore self, size, # type: int content_settings=None, # type: Optional[ContentSettings] @@ -619,6 +630,7 @@ async def create_page_blob( # type: ignore except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def create_append_blob(self, content_settings=None, metadata=None, **kwargs): # type: (Optional[ContentSettings], Optional[Dict[str, str]], Any) -> Dict[str, Union[str, datetime]] """Creates a new Append Blob. 
@@ -667,6 +679,7 @@ async def create_append_blob(self, content_settings=None, metadata=None, **kwarg except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def create_snapshot(self, metadata=None, **kwargs): # type: (Optional[Dict[str, str]], Any) -> Dict[str, Union[str, datetime]] """Creates a snapshot of the blob. @@ -726,6 +739,7 @@ async def create_snapshot(self, metadata=None, **kwargs): except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, **kwargs): # type: (str, Optional[Dict[str, str]], bool, Any) -> Any """Copies a blob asynchronously. @@ -877,6 +891,7 @@ async def start_copy_from_url(self, source_url, metadata=None, incremental_copy= except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def abort_copy(self, copy_id, **kwargs): # type: (Union[str, BlobProperties], Any) -> None """Abort an ongoing copy operation. @@ -904,6 +919,7 @@ async def abort_copy(self, copy_id, **kwargs): except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def acquire_lease(self, lease_duration=-1, lease_id=None, **kwargs): # type: (int, Optional[str], Optional[int], Any) -> LeaseClient """Requests a new lease. @@ -958,6 +974,7 @@ async def acquire_lease(self, lease_duration=-1, lease_id=None, **kwargs): await lease.acquire(lease_duration=lease_duration, **kwargs) return lease + @distributed_trace_async async def set_standard_blob_tier(self, standard_blob_tier, **kwargs): # type: (Union[str, StandardBlobTier], Any) -> None """This operation sets the tier on a block blob. @@ -993,6 +1010,7 @@ async def set_standard_blob_tier(self, standard_blob_tier, **kwargs): except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def stage_block( self, block_id, # type: str data, # type: Union[Iterable[AnyStr], IO[AnyStr]] @@ -1039,6 +1057,7 @@ async def stage_block( except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def stage_block_from_url( self, block_id, # type: str source_url, # type: str @@ -1083,6 +1102,7 @@ async def stage_block_from_url( except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def get_block_list(self, block_list_type="committed", **kwargs): # type: (Optional[str], Any) -> Tuple[List[BlobBlock], List[BlobBlock]] """The Get Block List operation retrieves the list of blocks that have @@ -1113,6 +1133,7 @@ async def get_block_list(self, block_list_type="committed", **kwargs): process_storage_error(error) return self._get_block_list_result(blocks) + @distributed_trace_async async def commit_block_list( # type: ignore self, block_list, # type: List[BlobBlock] content_settings=None, # type: Optional[ContentSettings] @@ -1179,6 +1200,7 @@ async def commit_block_list( # type: ignore except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def set_premium_page_blob_tier(self, premium_page_blob_tier, **kwargs): # type: (Union[str, PremiumPageBlobTier], Optional[int], Optional[Union[LeaseClient, str]], **Any) -> None """Sets the page blob tiers on the blob. This API is only supported for page blobs on premium accounts. 
@@ -1210,6 +1232,7 @@ async def set_premium_page_blob_tier(self, premium_page_blob_tier, **kwargs): except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def get_page_ranges( # type: ignore self, start_range=None, # type: Optional[int] end_range=None, # type: Optional[int] @@ -1284,6 +1307,7 @@ async def get_page_ranges( # type: ignore process_storage_error(error) return self._get_page_ranges_result(ranges) + @distributed_trace_async async def set_sequence_number( # type: ignore self, sequence_number_action, # type: Union[str, SequenceNumberAction] sequence_number=None, # type: Optional[str] @@ -1336,6 +1360,7 @@ async def set_sequence_number( # type: ignore except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def resize_blob(self, size, **kwargs): # type: (int, Any) -> Dict[str, Union[str, datetime]] """Resizes a page blob to the specified size. @@ -1381,6 +1406,7 @@ async def resize_blob(self, size, **kwargs): except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def upload_page( # type: ignore self, page, # type: bytes start_range, # type: int @@ -1466,6 +1492,7 @@ async def upload_page( # type: ignore except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def clear_page(self, start_range, end_range, **kwargs): # type: (int, int) -> Dict[str, Union[str, datetime]] """Clears a range of pages. @@ -1525,6 +1552,7 @@ async def clear_page(self, start_range, end_range, **kwargs): except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def append_block( # type: ignore self, data, # type: Union[Iterable[AnyStr], IO[AnyStr]] length=None, # type: Optional[int] diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/blob_service_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/blob_service_client_async.py index b250c65ff96f..a6bbd7c46753 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/blob_service_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/blob_service_client_async.py @@ -10,7 +10,10 @@ TYPE_CHECKING ) +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.async_paging import AsyncItemPaged + from .._shared.models import LocationMode from .._shared.policies_async import ExponentialRetry from .._shared.base_client_async import AsyncStorageAccountHostsMixin @@ -107,6 +110,7 @@ def __init__( self._client = AzureBlobStorage(url=self.url, pipeline=self._pipeline, loop=loop) self._loop = loop + @distributed_trace_async async def get_account_information(self, **kwargs): # type: ignore # type: (Optional[int]) -> Dict[str, str] """Gets information related to the storage account. @@ -130,6 +134,7 @@ async def get_account_information(self, **kwargs): # type: ignore except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def get_service_stats(self, timeout=None, **kwargs): # type: ignore # type: (Optional[int], **Any) -> Dict[str, Any] """Retrieves statistics related to replication for the Blob service. 
@@ -169,6 +174,7 @@ async def get_service_stats(self, timeout=None, **kwargs): # type: ignore except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def get_service_properties(self, timeout=None, **kwargs): # type(Optional[int]) -> Dict[str, Any] """Gets the properties of a storage account's Blob service, including @@ -191,6 +197,7 @@ async def get_service_properties(self, timeout=None, **kwargs): except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def set_service_properties( self, logging=None, # type: Optional[Logging] hour_metrics=None, # type: Optional[Metrics] @@ -267,6 +274,7 @@ async def set_service_properties( except StorageErrorException as error: process_storage_error(error) + @distributed_trace def list_containers( self, name_starts_with=None, # type: Optional[str] include_metadata=False, # type: Optional[bool] @@ -316,6 +324,7 @@ def list_containers( page_iterator_class=ContainerPropertiesPaged ) + @distributed_trace_async async def create_container( self, name, # type: str metadata=None, # type: Optional[Dict[str, str]] @@ -355,6 +364,7 @@ async def create_container( metadata=metadata, public_access=public_access, timeout=timeout, **kwargs) return container + @distributed_trace_async async def delete_container( self, container, # type: Union[ContainerProperties, str] lease=None, # type: Optional[Union[LeaseClient, str]] diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/container_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/container_client_async.py index 7f1bfb5189b5..47d7a8291d04 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/container_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/container_client_async.py @@ -10,7 +10,10 @@ TYPE_CHECKING ) +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.async_paging import AsyncItemPaged + from .._shared.base_client_async import AsyncStorageAccountHostsMixin from .._shared.policies_async import ExponentialRetry from .._shared.request_handlers import add_metadata_headers, serialize_iso @@ -112,6 +115,7 @@ def __init__( self._client = AzureBlobStorage(url=self.url, pipeline=self._pipeline, loop=loop) self._loop = loop + @distributed_trace_async async def create_container(self, metadata=None, public_access=None, timeout=None, **kwargs): # type: (Optional[Dict[str, str]], Optional[Union[PublicAccess, str]], Optional[int], **Any) -> None """ @@ -148,6 +152,7 @@ async def create_container(self, metadata=None, public_access=None, timeout=None except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def delete_container( self, lease=None, # type: Optional[Union[LeaseClient, str]] timeout=None, # type: Optional[int] @@ -210,6 +215,7 @@ async def delete_container( except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def acquire_lease( self, lease_duration=-1, # type: int lease_id=None, # type: Optional[str] @@ -267,6 +273,7 @@ async def acquire_lease( await lease.acquire(lease_duration=lease_duration, timeout=timeout, **kwargs) return lease + @distributed_trace_async async def get_account_information(self, **kwargs): # type: ignore # type: (**Any) -> Dict[str, str] """Gets information related to the storage account. 
@@ -282,6 +289,7 @@ async def get_account_information(self, **kwargs): # type: ignore except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def get_container_properties(self, lease=None, timeout=None, **kwargs): # type: (Optional[Union[LeaseClient, str]], Optional[int], **Any) -> ContainerProperties """Returns all user-defined metadata and system properties for the specified @@ -315,6 +323,7 @@ async def get_container_properties(self, lease=None, timeout=None, **kwargs): response.name = self.container_name return response # type: ignore + @distributed_trace_async async def set_container_metadata( # type: ignore self, metadata=None, # type: Optional[Dict[str, str]] lease=None, # type: Optional[Union[str, LeaseClient]] @@ -367,6 +376,7 @@ async def set_container_metadata( # type: ignore except StorageErrorException as error: process_storage_error(error) + @distributed_trace_async async def get_container_access_policy(self, lease=None, timeout=None, **kwargs): # type: (Optional[Union[LeaseClient, str]], Optional[int], **Any) -> Dict[str, Any] """Gets the permissions for the specified container. @@ -402,6 +412,7 @@ async def get_container_access_policy(self, lease=None, timeout=None, **kwargs): 'signed_identifiers': identifiers or [] } + @distributed_trace_async async def set_container_access_policy( self, signed_identifiers=None, # type: Optional[Dict[str, Optional[AccessPolicy]]] public_access=None, # type: Optional[Union[str, PublicAccess]] @@ -477,6 +488,7 @@ async def set_container_access_policy( except StorageErrorException as error: process_storage_error(error) + @distributed_trace def list_blobs(self, name_starts_with=None, include=None, timeout=None, **kwargs): # type: (Optional[str], Optional[Any], Optional[int], **Any) -> AsyncItemPaged[BlobProperties] """Returns a generator to list the blobs under the specified container. @@ -518,6 +530,7 @@ def list_blobs(self, name_starts_with=None, include=None, timeout=None, **kwargs page_iterator_class=BlobPropertiesPaged ) + @distributed_trace def walk_blobs( self, name_starts_with=None, # type: Optional[str] include=None, # type: Optional[Any] @@ -563,6 +576,7 @@ def walk_blobs( results_per_page=results_per_page, delimiter=delimiter) + @distributed_trace_async async def upload_blob( self, name, # type: Union[str, BlobProperties] data, # type: Union[Iterable[AnyStr], IO[AnyStr]] @@ -681,6 +695,7 @@ async def upload_blob( ) return blob + @distributed_trace_async async def delete_blob( self, blob, # type: Union[str, BlobProperties] delete_snapshots=None, # type: Optional[str] diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/lease_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/lease_async.py index b0a5e6693d7e..c372eb045b94 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/lease_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/lease_async.py @@ -9,6 +9,8 @@ TypeVar, TYPE_CHECKING ) +from azure.core.tracing.decorator_async import distributed_trace_async + from .._shared.response_handlers import return_response_headers, process_storage_error from .._generated.models import ( StorageErrorException, @@ -59,6 +61,7 @@ async def __aenter__(self): async def __aexit__(self, *args): await self.release() + @distributed_trace_async async def acquire(self, lease_duration=-1, timeout=None, **kwargs): # type: (int, Optional[int], Any) -> None """Requests a new lease. 
@@ -115,6 +118,7 @@ async def acquire(self, lease_duration=-1, timeout=None, **kwargs): self.last_modified = response.get('last_modified') # type: datetime self.etag = kwargs.get('etag') # type: str + @distributed_trace_async async def renew(self, timeout=None, **kwargs): # type: (Optional[int], Any) -> None """Renews the lease. @@ -168,6 +172,7 @@ async def renew(self, timeout=None, **kwargs): self.id = response.get('lease_id') # type: str self.last_modified = response.get('last_modified') # type: datetime + @distributed_trace_async async def release(self, timeout=None, **kwargs): # type: (Optional[int], Any) -> None """Release the lease. @@ -219,6 +224,7 @@ async def release(self, timeout=None, **kwargs): self.id = response.get('lease_id') # type: str self.last_modified = response.get('last_modified') # type: datetime + @distributed_trace_async async def change(self, proposed_lease_id, timeout=None, **kwargs): # type: (str, Optional[int], Any) -> None """Change the lease ID of an active lease. @@ -270,6 +276,7 @@ async def change(self, proposed_lease_id, timeout=None, **kwargs): self.id = response.get('lease_id') # type: str self.last_modified = response.get('last_modified') # type: datetime + @distributed_trace_async async def break_lease(self, lease_break_period=None, timeout=None, **kwargs): # type: (Optional[int], Optional[int], Any) -> int """Break the lease, if the container or blob has an active lease. diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/container_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/container_client.py index c2756c4f8fb9..19ec183a142d 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/container_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/container_client.py @@ -689,7 +689,6 @@ def list_blobs(self, name_starts_with=None, include=None, timeout=None, **kwargs command, prefix=name_starts_with, results_per_page=results_per_page, page_iterator_class=BlobPropertiesPaged) - @distributed_trace def walk_blobs( self, name_starts_with=None, # type: Optional[str] diff --git a/sdk/storage/azure-storage-blob/tests/test_container.py b/sdk/storage/azure-storage-blob/tests/test_container.py index ce0f9c8f9417..89a7b61bfba5 100644 --- a/sdk/storage/azure-storage-blob/tests/test_container.py +++ b/sdk/storage/azure-storage-blob/tests/test_container.py @@ -6,6 +6,7 @@ # license information. 
 # --------------------------------------------------------------------------
 import pytest
+import unittest
 from dateutil.tz import tzutc

 import requests
diff --git a/sdk/storage/azure-storage-file/azure/storage/file/_shared/uploads.py b/sdk/storage/azure-storage-file/azure/storage/file/_shared/uploads.py
index 50b314361540..5060761d27fc 100644
--- a/sdk/storage/azure-storage-file/azure/storage/file/_shared/uploads.py
+++ b/sdk/storage/azure-storage-file/azure/storage/file/_shared/uploads.py
@@ -340,7 +340,7 @@ class FileChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method

     def _upload_chunk(self, chunk_offset, chunk_data):
         chunk_end = chunk_offset + len(chunk_data) - 1
-        self.service.upload_range(
+        response = self.service.upload_range(
             chunk_data,
             chunk_offset,
             chunk_end,
@@ -348,7 +348,7 @@ def _upload_chunk(self, chunk_offset, chunk_data):
             upload_stream_current=self.progress_total,
             **self.request_options
         )
-        return 'bytes={0}-{1}'.format(chunk_offset, chunk_end)
+        return 'bytes={0}-{1}'.format(chunk_offset, chunk_end), response


 class SubStream(IOBase):
diff --git a/sdk/storage/azure-storage-file/azure/storage/file/_shared/uploads_async.py b/sdk/storage/azure-storage-file/azure/storage/file/_shared/uploads_async.py
index 861422855b8d..8aeafa39ef28 100644
--- a/sdk/storage/azure-storage-file/azure/storage/file/_shared/uploads_async.py
+++ b/sdk/storage/azure-storage-file/azure/storage/file/_shared/uploads_async.py
@@ -338,7 +338,7 @@ class FileChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method

     async def _upload_chunk(self, chunk_offset, chunk_data):
         chunk_end = chunk_offset + len(chunk_data) - 1
-        await self.service.upload_range(
+        response = await self.service.upload_range(
            chunk_data,
            chunk_offset,
            chunk_end,
@@ -347,4 +347,4 @@ async def _upload_chunk(self, chunk_offset, chunk_data):
            **self.request_options
        )
        range_id = 'bytes={0}-{1}'.format(chunk_offset, chunk_end)
-       return range_id
+       return range_id, response
diff --git a/sdk/storage/azure-storage-file/azure/storage/file/aio/file_client_async.py b/sdk/storage/azure-storage-file/azure/storage/file/aio/file_client_async.py
index 0795ad55fe6b..fb0eef292787 100644
--- a/sdk/storage/azure-storage-file/azure/storage/file/aio/file_client_async.py
+++ b/sdk/storage/azure-storage-file/azure/storage/file/aio/file_client_async.py
@@ -56,7 +56,7 @@ async def _upload_file_helper(
         if size == 0:
             return response

-        return await upload_data_chunks(
+        responses = await upload_data_chunks(
            service=client,
            uploader_class=FileChunkUploader,
            total_size=size,
@@ -67,6 +67,7 @@ async def _upload_file_helper(
            timeout=timeout,
            **kwargs
        )
+        return sorted(responses, key=lambda r: r.get('last_modified'))[-1]
     except StorageErrorException as error:
         process_storage_error(error)
diff --git a/sdk/storage/azure-storage-file/azure/storage/file/file_client.py b/sdk/storage/azure-storage-file/azure/storage/file/file_client.py
index 9ff3cf9b22e1..c11702d205b5 100644
--- a/sdk/storage/azure-storage-file/azure/storage/file/file_client.py
+++ b/sdk/storage/azure-storage-file/azure/storage/file/file_client.py
@@ -64,7 +64,7 @@ def _upload_file_helper(
         if size == 0:
             return response

-        return upload_data_chunks(
+        responses = upload_data_chunks(
            service=client,
            uploader_class=FileChunkUploader,
            total_size=size,
@@ -73,7 +73,9 @@ def _upload_file_helper(
            max_connections=max_connections,
            validate_content=validate_content,
            timeout=timeout,
-           **kwargs)
+           **kwargs
+        )
+        return sorted(responses, key=lambda r: r.get('last_modified'))[-1]
     except
StorageErrorException as error: process_storage_error(error) diff --git a/sdk/storage/azure-storage-file/tests/test_file.py b/sdk/storage/azure-storage-file/tests/test_file.py index 2d33244d40d0..00af637fd107 100644 --- a/sdk/storage/azure-storage-file/tests/test_file.py +++ b/sdk/storage/azure-storage-file/tests/test_file.py @@ -833,7 +833,8 @@ def test_create_file_from_bytes_with_progress(self): self.get_file_url(), share=self.share_name, file_path=file_name, - credential=self.settings.STORAGE_ACCOUNT_KEY) + credential=self.settings.STORAGE_ACCOUNT_KEY, + max_range_size=4 * 1024) # Act progress = [] @@ -843,7 +844,10 @@ def callback(response): if current is not None: progress.append((current, total)) - file_client.upload_file(data, max_connections=2, raw_response_hook=callback) + response = file_client.upload_file(data, max_connections=2, raw_response_hook=callback) + assert isinstance(response, dict) + assert 'last_modified' in response + assert 'etag' in response # Assert self.assertFileEqual(file_client, data) @@ -861,10 +865,14 @@ def test_create_file_from_bytes_with_index(self): self.get_file_url(), share=self.share_name, file_path=file_name, - credential=self.settings.STORAGE_ACCOUNT_KEY) + credential=self.settings.STORAGE_ACCOUNT_KEY, + max_range_size=4 * 1024) # Act - file_client.upload_file(data[index:], max_connections=2) + response = file_client.upload_file(data[index:], max_connections=2) + assert isinstance(response, dict) + assert 'last_modified' in response + assert 'etag' in response # Assert self.assertFileEqual(file_client, data[1024:]) @@ -883,10 +891,14 @@ def test_create_file_from_bytes_with_index_and_count(self): self.get_file_url(), share=self.share_name, file_path=file_name, - credential=self.settings.STORAGE_ACCOUNT_KEY) + credential=self.settings.STORAGE_ACCOUNT_KEY, + max_range_size=4 * 1024) # Act - file_client.upload_file(data[index:], length=count, max_connections=2) + response = file_client.upload_file(data[index:], length=count, max_connections=2) + assert isinstance(response, dict) + assert 'last_modified' in response + assert 'etag' in response # Assert self.assertFileEqual(file_client, data[index:index + count]) @@ -905,11 +917,15 @@ def test_create_file_from_path(self): self.get_file_url(), share=self.share_name, file_path=file_name, - credential=self.settings.STORAGE_ACCOUNT_KEY) + credential=self.settings.STORAGE_ACCOUNT_KEY, + max_range_size=4 * 1024) # Act with open(INPUT_FILE_PATH, 'rb') as stream: - file_client.upload_file(stream, max_connections=2) + response = file_client.upload_file(stream, max_connections=2) + assert isinstance(response, dict) + assert 'last_modified' in response + assert 'etag' in response # Assert self.assertFileEqual(file_client, data) @@ -940,7 +956,10 @@ def callback(response): progress.append((current, total)) with open(INPUT_FILE_PATH, 'rb') as stream: - file_client.upload_file(stream, max_connections=2, raw_response_hook=callback) + response = file_client.upload_file(stream, max_connections=2, raw_response_hook=callback) + assert isinstance(response, dict) + assert 'last_modified' in response + assert 'etag' in response # Assert self.assertFileEqual(file_client, data) @@ -963,12 +982,16 @@ def test_create_file_from_stream(self): self.get_file_url(), share=self.share_name, file_path=file_name, - credential=self.settings.STORAGE_ACCOUNT_KEY) + credential=self.settings.STORAGE_ACCOUNT_KEY, + max_range_size=4 * 1024) # Act file_size = len(data) with open(INPUT_FILE_PATH, 'rb') as stream: - file_client.upload_file(stream, 
max_connections=2) + response = file_client.upload_file(stream, max_connections=2) + assert isinstance(response, dict) + assert 'last_modified' in response + assert 'etag' in response # Assert self.assertFileEqual(file_client, data[:file_size]) @@ -987,7 +1010,8 @@ def test_create_file_from_stream_non_seekable(self): self.get_file_url(), share=self.share_name, file_path=file_name, - credential=self.settings.STORAGE_ACCOUNT_KEY) + credential=self.settings.STORAGE_ACCOUNT_KEY, + max_range_size=4 * 1024) # Act file_size = len(data) diff --git a/sdk/storage/azure-storage-file/tests/test_file_async.py b/sdk/storage/azure-storage-file/tests/test_file_async.py index 5a2187b3d33c..3d8632191c79 100644 --- a/sdk/storage/azure-storage-file/tests/test_file_async.py +++ b/sdk/storage/azure-storage-file/tests/test_file_async.py @@ -1044,7 +1044,8 @@ async def _test_create_file_from_bytes_with_progress_async(self): self.get_file_url(), share=self.share_name, file_path=file_name, - credential=self.settings.STORAGE_ACCOUNT_KEY) + credential=self.settings.STORAGE_ACCOUNT_KEY, + max_range_size=4 * 1024) # Act progress = [] @@ -1078,10 +1079,14 @@ async def _test_create_file_from_bytes_with_index_async(self): self.get_file_url(), share=self.share_name, file_path=file_name, - credential=self.settings.STORAGE_ACCOUNT_KEY) + credential=self.settings.STORAGE_ACCOUNT_KEY, + max_range_size=4 * 1024) # Act - await file_client.upload_file(data[index:], max_connections=2) + response = await file_client.upload_file(data[index:], max_connections=2) + assert isinstance(response, dict) + assert 'last_modified' in response + assert 'etag' in response # Assert await self.assertFileEqual(file_client, data[1024:]) @@ -1106,10 +1111,14 @@ async def _test_create_file_from_bytes_with_index_and_count_async(self): self.get_file_url(), share=self.share_name, file_path=file_name, - credential=self.settings.STORAGE_ACCOUNT_KEY) + credential=self.settings.STORAGE_ACCOUNT_KEY, + max_range_size=4 * 1024) # Act - await file_client.upload_file(data[index:], length=count, max_connections=2) + response = await file_client.upload_file(data[index:], length=count, max_connections=2) + assert isinstance(response, dict) + assert 'last_modified' in response + assert 'etag' in response # Assert await self.assertFileEqual(file_client, data[index:index + count]) @@ -1134,11 +1143,15 @@ async def _test_create_file_from_path_async(self): self.get_file_url(), share=self.share_name, file_path=file_name, - credential=self.settings.STORAGE_ACCOUNT_KEY) + credential=self.settings.STORAGE_ACCOUNT_KEY, + max_range_size=4 * 1024) # Act with open(INPUT_FILE_PATH, 'rb') as stream: - await file_client.upload_file(stream, max_connections=2) + response = await file_client.upload_file(stream, max_connections=2) + assert isinstance(response, dict) + assert 'last_modified' in response + assert 'etag' in response # Assert await self.assertFileEqual(file_client, data) @@ -1175,7 +1188,10 @@ def callback(response): progress.append((current, total)) with open(INPUT_FILE_PATH, 'rb') as stream: - await file_client.upload_file(stream, max_connections=2, raw_response_hook=callback) + response = await file_client.upload_file(stream, max_connections=2, raw_response_hook=callback) + assert isinstance(response, dict) + assert 'last_modified' in response + assert 'etag' in response # Assert await self.assertFileEqual(file_client, data) @@ -1204,12 +1220,16 @@ async def _test_create_file_from_stream_async(self): self.get_file_url(), share=self.share_name, file_path=file_name, - 
credential=self.settings.STORAGE_ACCOUNT_KEY) + credential=self.settings.STORAGE_ACCOUNT_KEY, + max_range_size=4 * 1024) # Act file_size = len(data) with open(INPUT_FILE_PATH, 'rb') as stream: - await file_client.upload_file(stream, max_connections=2) + response = await file_client.upload_file(stream, max_connections=2) + assert isinstance(response, dict) + assert 'last_modified' in response + assert 'etag' in response # Assert await self.assertFileEqual(file_client, data[:file_size]) @@ -1234,7 +1254,8 @@ async def _test_create_file_from_stream_non_seekable_async(self): self.get_file_url(), share=self.share_name, file_path=file_name, - credential=self.settings.STORAGE_ACCOUNT_KEY) + credential=self.settings.STORAGE_ACCOUNT_KEY, + max_range_size=4 * 1024) # Act file_size = len(data) diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads.py index 50b314361540..5060761d27fc 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads.py @@ -340,7 +340,7 @@ class FileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method def _upload_chunk(self, chunk_offset, chunk_data): chunk_end = chunk_offset + len(chunk_data) - 1 - self.service.upload_range( + response = self.service.upload_range( chunk_data, chunk_offset, chunk_end, @@ -348,7 +348,7 @@ def _upload_chunk(self, chunk_offset, chunk_data): upload_stream_current=self.progress_total, **self.request_options ) - return 'bytes={0}-{1}'.format(chunk_offset, chunk_end) + return 'bytes={0}-{1}'.format(chunk_offset, chunk_end), response class SubStream(IOBase): diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads_async.py index 861422855b8d..8aeafa39ef28 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads_async.py @@ -338,7 +338,7 @@ class FileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method async def _upload_chunk(self, chunk_offset, chunk_data): chunk_end = chunk_offset + len(chunk_data) - 1 - await self.service.upload_range( + response = await self.service.upload_range( chunk_data, chunk_offset, chunk_end, @@ -347,4 +347,4 @@ async def _upload_chunk(self, chunk_offset, chunk_data): **self.request_options ) range_id = 'bytes={0}-{1}'.format(chunk_offset, chunk_end) - return range_id + return range_id, response
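
As a minimal sketch of the behaviour this patch establishes for Files uploads (illustrative only, not part of the patch): each FileChunkUploader._upload_chunk now returns the service response headers alongside its range string, and _upload_file_helper surfaces the response with the newest last_modified, so upload_file consistently returns a dict carrying etag and last_modified whether or not the upload was chunked. The helper name pick_final_response and the sample header dicts below are assumptions made for the example, not SDK API.

# Illustrative only: mirrors the selection logic added to _upload_file_helper.
from datetime import datetime, timedelta

def pick_final_response(chunk_results):
    # chunk_results: (range_id, response_headers) pairs, shaped like the tuples
    # FileChunkUploader._upload_chunk now returns; keep the headers of the most
    # recently modified range, which is what upload_file ends up returning.
    responses = [headers for _range_id, headers in chunk_results]
    return sorted(responses, key=lambda r: r.get('last_modified'))[-1]

start = datetime(2019, 8, 21, 6, 41, 46)
chunk_results = [  # hypothetical results for a two-chunk upload
    ('bytes=0-4095', {'etag': '"0x1"', 'last_modified': start}),
    ('bytes=4096-8191', {'etag': '"0x2"', 'last_modified': start + timedelta(seconds=1)}),
]
final = pick_final_response(chunk_results)
assert 'etag' in final and 'last_modified' in final

This is also why the updated tests pass max_range_size=4 * 1024 when constructing the file client: it forces even small payloads down the chunked upload path, so the assertions on the returned etag and last_modified exercise the new return value.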