From 73d0b36cf9c5ded2c5a642a48e9cf879f06455b5 Mon Sep 17 00:00:00 2001
From: Xiaoxi Fu <49707495+xiafu-msft@users.noreply.github.com>
Date: Thu, 22 Apr 2021 08:46:15 -0700
Subject: [PATCH] [Blob]Edit Test for Single Thread Substream Retry (#18206)

Set block size to a smaller value
---
 .../azure-storage-blob/tests/test_largest_block_blob.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/sdk/storage/azure-storage-blob/tests/test_largest_block_blob.py b/sdk/storage/azure-storage-blob/tests/test_largest_block_blob.py
index e00abbf91ca8..d78bd960761a 100644
--- a/sdk/storage/azure-storage-blob/tests/test_largest_block_blob.py
+++ b/sdk/storage/azure-storage-blob/tests/test_largest_block_blob.py
@@ -28,6 +28,8 @@
 LARGEST_BLOCK_SIZE = 4000 * 1024 * 1024
 LARGEST_SINGLE_UPLOAD_SIZE = 5000 * 1024 * 1024
 
+LARGE_BLOCK_SIZE = 100 * 1024 * 1024
+
 # ------------------------------------------------------------------------------
 if platform.python_implementation() == 'PyPy':
     pytest.skip("Skip tests for Pypy", allow_module_level=True)
@@ -212,7 +214,7 @@ def test_create_largest_blob_from_path(self, resource_group, location, storage_a
     def test_substream_for_single_thread_upload_large_block(self):
         FILE_PATH = 'largest_blob_from_path.temp.{}.dat'.format(str(uuid.uuid4()))
         with open(FILE_PATH, 'wb') as stream:
-            largeStream = LargeStream(LARGEST_BLOCK_SIZE, 100 * 1024 * 1024)
+            largeStream = LargeStream(LARGE_BLOCK_SIZE, 4 * 1024 * 1024)
             chunk = largeStream.read()
             while chunk:
                 stream.write(chunk)
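
For context on the hunk above: LargeStream is a helper defined elsewhere in the azure-storage-blob test utilities, not in this file. Below is a minimal sketch of how such a helper might behave, assuming its two constructor arguments are the total number of bytes to produce and the per-read chunk size; the class name, signature, and behavior are inferred from the call sites in the patched test, not taken from the real implementation.

    class LargeStream:
        """Hedged sketch of a LargeStream-style helper (the real one lives in
        the test suite's shared utilities); it emits zero-filled chunks until
        total_size bytes have been produced."""

        def __init__(self, total_size, chunk_size):
            self.total_size = total_size    # total bytes the stream will yield
            self.chunk_size = chunk_size    # bytes returned by each read()
            self.bytes_emitted = 0

        def read(self, size=None):
            # Return the next chunk, or b'' once total_size is reached,
            # which ends the `while chunk:` loop in the test above.
            remaining = self.total_size - self.bytes_emitted
            if remaining <= 0:
                return b''
            n = min(remaining, size or self.chunk_size)
            self.bytes_emitted += n
            return b'\x00' * n

Under that assumption, switching the test from LargeStream(LARGEST_BLOCK_SIZE, 100 * 1024 * 1024) to LargeStream(LARGE_BLOCK_SIZE, 4 * 1024 * 1024) shrinks the temporary file the test writes from roughly 4000 MiB to roughly 100 MiB, which matches the stated intent of setting the block size to a smaller value.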