Commit

[Storage][Blob][Fix]fix get_block_list bug (#18751)
xiafu-msft authored Jun 28, 2021
1 parent 88a742c commit ee0af9b
Showing 3 changed files with 22 additions and 5 deletions.
12 changes: 10 additions & 2 deletions sdk/storage/azure-storage-blob/azure/storage/blob/_models.py
@@ -12,7 +12,7 @@
 from azure.core.exceptions import HttpResponseError
 from ._generated.models import ArrowField
 
-from ._shared import decode_base64_to_text
+from ._shared import decode_base64_to_bytes
 from ._shared.response_handlers import return_context_and_deserialized, process_storage_error
 from ._shared.models import DictMixin, get_enum_value
 from ._generated.models import Logging as GeneratedLogging
@@ -739,7 +739,15 @@ def __init__(self, block_id, state=BlockState.Latest):

     @classmethod
     def _from_generated(cls, generated):
-        block = cls(decode_base64_to_text(generated.name))
+        try:
+            decoded_bytes = decode_base64_to_bytes(generated.name)
+            block_id = decoded_bytes.decode('utf-8')
+        # This fixes a bug: large blocks uploaded through upload_blob have block ids
+        # that are not base64 encoded, while the service expects base64-encoded ids.
+        # If decoding the returned block id fails, it was never encoded, so use it as-is.
+        except UnicodeDecodeError:
+            block_id = generated.name
+        block = cls(block_id)
         block.size = generated.size
         return block

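The heart of the fix is a decode-with-fallback on the block id returned by the service. A minimal standalone sketch of the same logic using only the standard library (the helper name _decode_block_id is hypothetical, and base64.b64decode stands in for the SDK's internal decode_base64_to_bytes):

import base64

def _decode_block_id(name):
    # Try to read the block id as base64-encoded UTF-8 text. validate=True
    # makes b64decode raise binascii.Error (a ValueError subclass) for ids
    # that are not base64 at all; UnicodeDecodeError covers decoded bytes
    # that are not valid UTF-8. Either way, the id was not base64 encoded
    # when the block was staged, so it is returned unchanged.
    try:
        return base64.b64decode(name, validate=True).decode('utf-8')
    except (ValueError, UnicodeDecodeError):
        return name

For example, _decode_block_id('TU0wMA==') returns 'MM00', while an id containing characters outside the base64 alphabet falls through the except and comes back as-is. The committed fix catches only UnicodeDecodeError; the sketch also catches ValueError because plain base64.b64decode can itself raise binascii.Error for malformed input.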
10 changes: 8 additions & 2 deletions sdk/storage/azure-storage-blob/tests/test_large_block_blob.py
@@ -51,7 +51,10 @@ def _setup(self, storage_account, key):
         self.container_name = self.get_resource_name('utcontainer')
 
         if self.is_live:
-            self.bsc.create_container(self.container_name)
+            try:
+                self.bsc.create_container(self.container_name)
+            except:
+                pass
 
     def _teardown(self, file_name):
         if path.isfile(file_name):
@@ -170,9 +173,12 @@ def test_create_large_blob_from_path(self, resource_group, location, storage_acc

         # Act
         with open(FILE_PATH, 'rb') as stream:
-            blob.upload_blob(stream, max_concurrency=2)
+            blob.upload_blob(stream, max_concurrency=2, overwrite=True)
+
+        block_list = blob.get_block_list()
 
         # Assert
+        self.assertIsNot(len(block_list), 0)
         self.assertBlobEqual(self.container_name, blob_name, data)
         self._teardown(FILE_PATH)

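The updated test captures the regression end to end: a large upload through upload_blob stages blocks with raw (non-base64) ids, and get_block_list could previously fail while decoding them. A minimal reproduction sketch against the v12 client; the connection string, container, blob, and file names are placeholders:

from azure.storage.blob import BlobClient

# Placeholder connection details; substitute real values to run.
blob = BlobClient.from_connection_string(
    "<connection-string>",
    container_name="utcontainer",
    blob_name="largeblob",
)

# For the bug to surface, the file must be large enough that upload_blob
# chunks it into staged blocks rather than a single put.
with open("large_file.bin", "rb") as stream:
    blob.upload_blob(stream, max_concurrency=2, overwrite=True)

# get_block_list returns a (committed, uncommitted) pair of BlobBlock lists;
# before this fix the call could fail decoding the raw block ids staged above.
committed, uncommitted = blob.get_block_list("all")
print(len(committed), "committed blocks")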
5 changes: 4 additions & 1 deletion sdk/storage/azure-storage-blob/tests/test_large_block_blob_async.py
@@ -199,9 +199,12 @@ async def test_create_large_blob_from_path_async(self, resource_group, location,
         # Act
         try:
             with open(FILE_PATH, 'rb') as stream:
-                await blob.upload_blob(stream, max_concurrency=2)
+                await blob.upload_blob(stream, max_concurrency=2, overwrite=True)
+
+            block_list = await blob.get_block_list()
 
             # Assert
+            self.assertIsNot(len(block_list), 0)
             await self.assertBlobEqual(self.container_name, blob_name, data)
         finally:
             self._teardown(FILE_PATH)
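The async test exercises the same path through azure.storage.blob.aio. A matching sketch, again with placeholder connection details:

import asyncio
from azure.storage.blob.aio import BlobClient

async def main():
    # Placeholder connection details; substitute real values to run.
    async with BlobClient.from_connection_string(
        "<connection-string>",
        container_name="utcontainer",
        blob_name="largeblob",
    ) as blob:
        with open("large_file.bin", "rb") as stream:
            await blob.upload_blob(stream, max_concurrency=2, overwrite=True)
        # Same (committed, uncommitted) pair as the sync client returns.
        committed, uncommitted = await blob.get_block_list("all")
        print(len(committed), "committed blocks")

asyncio.run(main())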
