From 029bf31ec62a059881ea47e59536d0050105221f Mon Sep 17 00:00:00 2001 From: Sean McCullough <44180881+seanmcc-msft@users.noreply.github.com> Date: Wed, 3 Jun 2020 16:24:45 -0700 Subject: [PATCH] Added Change Feed (#11692) --- eng/Packages.Data.props | 1 + .../BreakingChanges.txt | 6 + .../CHANGELOG.md | 4 + .../Azure.Storage.Blobs.ChangeFeed/README.md | 148 +++ ...rage.Blobs.ChangeFeed.Samples.Tests.csproj | 31 + .../samples/README.md | 15 + .../samples/Sample01a_HelloWorld.cs | 115 ++ .../samples/Sample01b_HelloWorldAsync.cs | 121 ++ .../src/AssemblyInfo.cs | 13 + .../src/AvroReaderFactory.cs | 29 + .../src/Azure.Storage.Blobs.ChangeFeed.csproj | 48 + .../src/BlobChangeFeedAsyncPagable.cs | 89 ++ .../src/BlobChangeFeedClient.cs | 219 ++++ .../src/BlobChangeFeedExtensions.cs | 168 +++ .../src/BlobChangeFeedPagable.cs | 80 ++ .../src/ChangeFeed.cs | 256 ++++ .../src/ChangeFeedFactory.cs | 204 ++++ .../src/Chunk.cs | 69 ++ .../src/ChunkFactory.cs | 70 ++ .../src/LazyLoadingBlobStream.cs | 246 ++++ .../src/LazyLoadingBlobStreamFactory.cs | 21 + .../src/Models/BlobChangeFeedEvent.cs | 93 ++ .../src/Models/BlobChangeFeedEventData.cs | 129 ++ .../src/Models/BlobChangeFeedEventPage.cs | 30 + .../src/Models/BlobChangeFeedEventType.cs | 21 + .../src/Models/BlobChangeFeedModelFactory.cs | 72 ++ .../src/Models/ChangeFeedCursor.cs | 48 + .../src/Models/SegmentCursor.cs | 42 + .../src/Models/ShardCursor.cs | 44 + .../src/Segment.cs | 131 +++ .../src/SegmentFactory.cs | 94 ++ .../src/Shard.cs | 103 ++ .../src/ShardFactory.cs | 93 ++ ...zure.Storage.Blobs.ChangeFeed.Tests.csproj | 25 + .../tests/BlobChangeFeedAsyncPagableTests.cs | 89 ++ .../tests/BlobChangeFeedExtensionsTests.cs | 157 +++ .../tests/BlobChangeFeedPagableTests.cs | 35 + .../tests/ChangeFeedFactoryTests.cs | 68 ++ .../tests/ChangeFeedTestBase.cs | 173 +++ .../tests/ChangeFeedTests.cs | 1037 +++++++++++++++++ .../tests/ChunkTests.cs | 266 +++++ .../tests/LazyLoadingBlobStreamTests.cs | 98 ++ 
.../tests/Resources/ChangeFeedManifest.json | 12 + .../tests/Resources/SegmentManifest.json | 26 + .../tests/SegmentTests.cs | 271 +++++ .../GetSegmentsInYearTest.json | 4 + .../GetSegmentsInYearTestAsync.json | 4 + .../RoundDownToNearestHourTests.json | 4 + .../RoundDownToNearestHourTestsAsync.json | 4 + .../RoundDownToNearestYearTests.json | 4 + .../RoundDownToNearestYearTestsAsync.json | 4 + .../RoundUpToNearestHourTests.json | 4 + .../RoundUpToNearestHourTestsAsync.json | 4 + .../ToDateTimeOffsetTests.json | 4 + .../ToDateTimeOffsetTestsAsync.json | 4 + .../GetYearPathsTest.json | 4 + .../GetYearPathsTestAsync.json | 4 + .../ChangeFeedTests/GetCursor.json | 4 + .../ChangeFeedTests/GetCursorAsync.json | 4 + .../ChangeFeedTests/GetPage.json | 4 + .../ChangeFeedTests/GetPageAsync.json | 4 + .../GetSegmentsInYearTest.json | 4 + .../GetSegmentsInYearTestAsync.json | 4 + .../ChangeFeedTests/GetYearPathsTest.json | 4 + .../GetYearPathsTestAsync.json | 4 + .../NoSegmentsRemainingInStartYear.json | 4 + .../NoSegmentsRemainingInStartYearAsync.json | 4 + .../NoYearsAfterStartTime.json | 4 + .../NoYearsAfterStartTimeAsync.json | 4 + .../ChunkTests/HasNext_False.json | 4 + .../ChunkTests/HasNext_FalseAsync.json | 4 + .../ChunkTests/HasNext_True.json | 4 + .../ChunkTests/HasNext_TrueAsync.json | 4 + .../tests/SessionRecords/ChunkTests/Next.json | 4 + .../SessionRecords/ChunkTests/NextAsync.json | 4 + .../LazyLoadingBlobStreamTests/ReadAsync.json | 418 +++++++ .../ReadAsyncAsync.json | 418 +++++++ .../ReadAsync_InvalidParameterTests.json | 4 + .../ReadAsync_InvalidParameterTestsAsync.json | 4 + .../SegmentTests/GetCursor.json | 4 + .../SegmentTests/GetCursorAsync.json | 4 + .../SessionRecords/SegmentTests/GetPage.json | 4 + .../SegmentTests/GetPageAsync.json | 4 + .../SegmentTests/HasNext_False.json | 4 + .../SegmentTests/HasNext_FalseAsync.json | 4 + .../SegmentTests/HasNext_NotInitalized.json | 4 + .../HasNext_NotInitalizedAsync.json | 4 + 
.../SessionRecords/ShardTests/GetCursor.json | 4 + .../ShardTests/GetCursorAsync.json | 4 + .../ShardTests/HasNext_ChunksLeft.json | 4 + .../ShardTests/HasNext_ChunksLeftAsync.json | 4 + .../HasNext_CurrentChunkHasNext.json | 4 + .../HasNext_CurrentChunkHasNextAsync.json | 4 + .../ShardTests/HasNext_False.json | 4 + .../ShardTests/HasNext_FalseAsync.json | 4 + .../ShardTests/HasNext_NotInitalizes.json | 4 + .../HasNext_NotInitalizesAsync.json | 4 + .../tests/SessionRecords/ShardTests/Next.json | 4 + .../SessionRecords/ShardTests/NextAsync.json | 4 + .../tests/ShardTests.cs | 613 ++++++++++ .../src/Shared/Constants.cs | 46 + .../src/AvroReader.cs | 13 +- sdk/storage/Azure.Storage.sln | 18 + 103 files changed, 6840 insertions(+), 4 deletions(-) create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/BreakingChanges.txt create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/CHANGELOG.md create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Azure.Storage.Blobs.ChangeFeed.Samples.Tests.csproj create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/README.md create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AssemblyInfo.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AvroReaderFactory.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Azure.Storage.Blobs.ChangeFeed.csproj create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedClient.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs create mode 100644 
sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStreamFactory.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventType.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ChangeFeedCursor.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/SegmentCursor.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ShardCursor.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Azure.Storage.Blobs.ChangeFeed.Tests.csproj create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs create 
mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/LazyLoadingBlobStreamTests.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/ChangeFeedManifest.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/SegmentManifest.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTest.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTestAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTests.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTestsAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTests.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTestsAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTests.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTestsAsync.json 
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTests.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTestsAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTest.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTestAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursor.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursorAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPage.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPageAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTest.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTestAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTest.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTestAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYear.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYearAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTime.json create mode 100644 
sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTimeAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_False.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_FalseAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_True.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_TrueAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/Next.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/NextAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsyncAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTests.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTestsAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursor.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursorAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPage.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPageAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_False.json create mode 100644 
sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_FalseAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalized.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalizedAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursor.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursorAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeft.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeftAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNext.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNextAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_False.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_FalseAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizes.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizesAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/Next.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/NextAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs diff --git a/eng/Packages.Data.props b/eng/Packages.Data.props index e0cda5e3bf864..d47963fa2612c 100644 --- a/eng/Packages.Data.props +++ 
b/eng/Packages.Data.props @@ -23,6 +23,7 @@ + diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/BreakingChanges.txt b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/BreakingChanges.txt new file mode 100644 index 0000000000000..6717353356a1c --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/BreakingChanges.txt @@ -0,0 +1,6 @@ +Breaking Changes +================ + +12.0.0-preview.1 +-------------------------- +- New Azure.Storage.Blobs.ChangeFeed client library. \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/CHANGELOG.md b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/CHANGELOG.md new file mode 100644 index 0000000000000..de3267ebd7456 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/CHANGELOG.md @@ -0,0 +1,4 @@ +# Release History + +## 12.0.0-preview.1 +This preview is the first release supporting Azure Storage Blobs Change Feed. \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md new file mode 100644 index 0000000000000..4e81313d00f72 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md @@ -0,0 +1,148 @@ +# Azure Storage Blobs Change Feed client library for .NET + +> Server Version: 2019-12-12 + +The purpose of the change feed is to provide transaction logs of all the changes that occur to +the blobs and the blob metadata in your storage account. The change feed provides ordered, +guaranteed, durable, immutable, read-only log of these changes. Client applications can read these +logs at any time. The change feed enables you to build efficient and scalable solutions that +process change events that occur in your Blob Storage account at a low cost. + +[Source code][source] | [Product documentation][product_docs] + +## Getting started + +### Install the package +- TODO after we have released. 
+ +### Prerequisites + +You need an [Azure subscription][azure_sub] and a +[Storage Account][storage_account_docs] to use this package. + +To create a new Storage Account, you can use the [Azure Portal][storage_account_create_portal], +[Azure PowerShell][storage_account_create_ps], or the [Azure CLI][storage_account_create_cli]. +Here's an example using the Azure CLI: + +```Powershell +az storage account create --name MyStorageAccount --resource-group MyResourceGroup --location westus --sku Standard_LRS +``` + +### Authenticate the Client + +Authentication works the same as in [Azure.Storage.Blobs][authenticating_with_blobs]. + +## Key concepts + +The change feed is stored as blobs in a special container in your storage account at standard blob +pricing cost. You can control the retention period of these files based on your requirements +(See the conditions of the current release). Change events are appended to the change feed as records +in the Apache Avro format specification: a compact, fast, binary format that provides rich data structures +with inline schema. This format is widely used in the Hadoop ecosystem, Stream Analytics, and Azure Data +Factory. + +You can process these logs incrementally or in-full. Any number of client applications can independently +read the change feed, in parallel, and at their own pace. Analytics applications such as Apache Drill or +Apache Spark can consume logs directly as Avro files, which let you process them at a low-cost, with +high-bandwidth, and without having to write a custom application. + +## Examples + +### Get all events in the Change Feed +```C# Snippet:SampleSnippetsChangeFeed_GetAllEvents +// Get all the events in the change feed. 
+List changeFeedEvents = new List(); +await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync()) +{ + changeFeedEvents.Add(changeFeedEvent); +} +``` + +### Get events between a start and end time +```C# Snippet:SampleSnippetsChangeFeed_GetEventsBetweenStartAndEndTime +// Create the start and end time. The change feed client will round start time down to +// the nearest hour, and round endTime up to the next hour if you provide DateTimeOffsets +// with minutes and seconds. +DateTimeOffset startTime = new DateTimeOffset(2017, 3, 2, 15, 0, 0, TimeSpan.Zero); +DateTimeOffset endTime = new DateTimeOffset(2020, 10, 7, 2, 0, 0, TimeSpan.Zero); + +// You can also provide just a start or end time. +await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync( + start: startTime, + end: endTime)) +{ + changeFeedEvents.Add(changeFeedEvent); +} +``` + +### Resume with cursor +```C# Snippet:SampleSnippetsChangeFeed_ResumeWithCursor +IAsyncEnumerator> enumerator = changeFeedClient + .GetChangesAsync() + .AsPages(pageSizeHint: 10) + .GetAsyncEnumerator(); + +await enumerator.MoveNextAsync(); + +foreach (BlobChangeFeedEvent changeFeedEvent in enumerator.Current.Values) +{ + changeFeedEvents.Add(changeFeedEvent); +} + +// get the change feed cursor. The cursor is not required to get each page of events, +// it is intended to be saved and used to resume iterating at a later date. +string cursor = enumerator.Current.ContinuationToken; + +// Resume iterating from the pervious position with the cursor. +await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync( + continuation: cursor)) +{ + changeFeedEvents.Add(changeFeedEvent); +} +``` + +## Troubleshooting +All Blob service operations will throw a +[RequestFailedException][RequestFailedException] on failure with +helpful [`ErrorCode`s][error_codes]. Many of these errors are recoverable. 
+ +## Next steps + +Get started with our [Change Feed samples][samples]: + +1. [Hello World](samples/Sample01a_HelloWorld.cs): Get changes that have occured in your storage account (or [asynchronously](samples/Sample01b_HelloWorldAsync.cs)) +2. [Auth](samples/Sample02_Auth.cs): Authenticate with connection strings, public access, shared keys, shared access signatures, and Azure Active Directory. + + +## Contributing + +See the [Storage CONTRIBUTING.md][storage_contrib] for details on building, +testing, and contributing to this library. + +This project welcomes contributions and suggestions. Most contributions require +you to agree to a Contributor License Agreement (CLA) declaring that you have +the right to, and actually do, grant us the rights to use your contribution. For +details, visit [cla.microsoft.com][cla]. + +This project has adopted the [Microsoft Open Source Code of Conduct][coc]. +For more information see the [Code of Conduct FAQ][coc_faq] +or contact [opencode@microsoft.com][coc_contact] with any +additional questions or comments. 
+ + +[source]: https://github.com/Azure/azure-sdk-for-net/tree/master/sdk/storage/Azure.Storage.Blobs/srcs +[product_docs]: https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-change-feed +[azure_sub]: https://azure.microsoft.com/free/ +[storage_account_docs]: https://docs.microsoft.com/azure/storage/common/storage-account-overview +[storage_account_create_ps]: https://docs.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-powershell +[storage_account_create_cli]: https://docs.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-cli +[storage_account_create_portal]: https://docs.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-portal +[authenticating_with_blobs]: https://github.com/Azure/azure-sdk-for-net/blob/master/sdk/storage/Azure.Storage.Blobs/samples/Sample02_Auth.cs +[RequestFailedException]: https://github.com/Azure/azure-sdk-for-net/tree/master/sdk/core/Azure.Core/src/RequestFailedException.cs +[error_codes]: https://docs.microsoft.com/rest/api/storageservices/blob-service-error-codes +[samples]: samples/ +[storage_contrib]: ../CONTRIBUTING.md +[cla]: https://cla.microsoft.com +[coc]: https://opensource.microsoft.com/codeofconduct/ +[coc_faq]: https://opensource.microsoft.com/codeofconduct/faq/ +[coc_contact]: mailto:opencode@microsoft.com \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Azure.Storage.Blobs.ChangeFeed.Samples.Tests.csproj b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Azure.Storage.Blobs.ChangeFeed.Samples.Tests.csproj new file mode 100644 index 0000000000000..2f30c91b605f9 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Azure.Storage.Blobs.ChangeFeed.Samples.Tests.csproj @@ -0,0 +1,31 @@ + + + $(RequiredTargetFrameworks) + Microsoft Azure.Storage.Blobs.ChangeFeed client library samples + false + + + + + + + + + + PreserveNewest + + + + + + + + 
+ + + + + + + + \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/README.md b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/README.md new file mode 100644 index 0000000000000..e4847a5497efa --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/README.md @@ -0,0 +1,15 @@ +--- +page_type: sample +languages: +- csharp +products: +- azure +- azure-storage +name: Azure.Storage.ChangeFeed samples for .NET +description: Samples for the Azure.Storage.Blobs.ChangeFeed client library +--- + +# Azure.Storage.ChangeFeed Samples + +- sample 0 +- sample 1 \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs new file mode 100644 index 0000000000000..fcf9da2a85f30 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs @@ -0,0 +1,115 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Reflection.Metadata.Ecma335; +using Azure.Storage.Blobs.ChangeFeed.Models; +using NUnit.Framework; + +namespace Azure.Storage.Blobs.ChangeFeed.Samples +{ + /// + /// Basic Azure ChangeFeed Storage samples. + /// + public class Sample01a_HelloWorld : SampleTest + { + /// + /// Download every event in the change feed. + /// + [Test] + public void ChangeFeed() + { + // Get a connection string to our Azure Storage account. + string connectionString = ConnectionString; + + // Get a new blob service client. + BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString); + + // Get a new change feed client. + BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient(); + + // Get all the events in the change feed. 
+ List changeFeedEvents = new List(); + foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChanges()) + { + changeFeedEvents.Add(changeFeedEvent); + } + } + + /// + /// Download change feed events between a start and end time. + /// + [Test] + public void ChangeFeedBetweenDates() + { + // Get a connection string to our Azure Storage account. + string connectionString = ConnectionString; + + // Get a new blob service client. + BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString); + + // Get a new change feed client. + BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient(); + List changeFeedEvents = new List(); + + // Create the start and end time. The change feed client will round start time down to + // the nearest hour, and round endTime up to the next hour if you provide DateTimeOffsets + // with minutes and seconds. + DateTimeOffset startTime = new DateTimeOffset(2017, 3, 2, 15, 0, 0, TimeSpan.Zero); + DateTimeOffset endTime = new DateTimeOffset(2020, 10, 7, 2, 0, 0, TimeSpan.Zero); + + // You can also provide just a start or end time. + foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChanges( + start: startTime, + end: endTime)) + { + changeFeedEvents.Add(changeFeedEvent); + } + } + + /// + /// You can use the change feed cursor to resume iterating throw the change feed + /// at a later time. + /// + [Test] + public void ChangeFeedResumeWithCursor() + { + // Get a connection string to our Azure Storage account. + string connectionString = ConnectionString; + + // Get a new blob service client. + BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString); + + // Get a new change feed client. 
+ BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient(); + List changeFeedEvents = new List(); + + IEnumerator> enumerator = changeFeedClient + .GetChanges() + .AsPages(pageSizeHint: 10) + .GetEnumerator(); + ; + + enumerator.MoveNext(); + + foreach (BlobChangeFeedEvent changeFeedEvent in enumerator.Current.Values) + { + changeFeedEvents.Add(changeFeedEvent); + } + + // get the change feed cursor. The cursor is not required to get each page of events, + // it is intended to be saved and used to resume iterating at a later date. + string cursor = enumerator.Current.ContinuationToken; + + // Resume iterating from the pervious position with the cursor. + foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChanges( + continuation: cursor)) + { + changeFeedEvents.Add(changeFeedEvent); + } + } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs new file mode 100644 index 0000000000000..3dc4f738969ac --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs @@ -0,0 +1,121 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using Azure.Storage; +using Azure.Storage.Blobs.ChangeFeed.Models; +using NUnit.Framework; + +namespace Azure.Storage.Blobs.ChangeFeed.Samples +{ + /// + /// Basic Azure ChangeFeed Storage samples. + /// + public class Sample01b_HelloWorldAsync : SampleTest + { + /// + /// Download every event in the change feed. + /// + [Test] + public async Task ChangeFeedAsync() + { + // Get a connection string to our Azure Storage account. + string connectionString = ConnectionString; + + // Get a new blob service client. 
+ BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString); + + // Get a new change feed client. + BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient(); + + #region Snippet:SampleSnippetsChangeFeed_GetAllEvents + // Get all the events in the change feed. + List changeFeedEvents = new List(); + await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync()) + { + changeFeedEvents.Add(changeFeedEvent); + } + #endregion + } + + /// + /// Download change feed events between a start and end time. + /// + [Test] + public async Task ChangeFeedBetweenDatesAsync() + { + // Get a connection string to our Azure Storage account. + string connectionString = ConnectionString; + + // Get a new blob service client. + BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString); + + // Get a new change feed client. + BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient(); + List changeFeedEvents = new List(); + + #region Snippet:SampleSnippetsChangeFeed_GetEventsBetweenStartAndEndTime + // Create the start and end time. The change feed client will round start time down to + // the nearest hour, and round endTime up to the next hour if you provide DateTimeOffsets + // with minutes and seconds. + DateTimeOffset startTime = new DateTimeOffset(2017, 3, 2, 15, 0, 0, TimeSpan.Zero); + DateTimeOffset endTime = new DateTimeOffset(2020, 10, 7, 2, 0, 0, TimeSpan.Zero); + + // You can also provide just a start or end time. + await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync( + start: startTime, + end: endTime)) + { + changeFeedEvents.Add(changeFeedEvent); + } + #endregion + } + + /// + /// You can use the change feed cursor to resume iterating throw the change feed + /// at a later time. + /// + [Test] + public async Task ChangeFeedResumeWithCursorAsync() + { + // Get a connection string to our Azure Storage account. 
+ string connectionString = ConnectionString; + + // Get a new blob service client. + BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString); + + // Get a new change feed client. + BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient(); + List changeFeedEvents = new List(); + + #region Snippet:SampleSnippetsChangeFeed_ResumeWithCursor + IAsyncEnumerator> enumerator = changeFeedClient + .GetChangesAsync() + .AsPages(pageSizeHint: 10) + .GetAsyncEnumerator(); + + await enumerator.MoveNextAsync(); + + foreach (BlobChangeFeedEvent changeFeedEvent in enumerator.Current.Values) + { + changeFeedEvents.Add(changeFeedEvent); + } + + // get the change feed cursor. The cursor is not required to get each page of events, + // it is intended to be saved and used to resume iterating at a later date. + string cursor = enumerator.Current.ContinuationToken; + + // Resume iterating from the pervious position with the cursor. + await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync( + continuation: cursor)) + { + changeFeedEvents.Add(changeFeedEvent); + } + #endregion + } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AssemblyInfo.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AssemblyInfo.cs new file mode 100644 index 0000000000000..0c57a2e78eba5 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AssemblyInfo.cs @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("Azure.Storage.Blobs.ChangeFeed.Tests, PublicKey=" + + "0024000004800000940000000602000000240000525341310004000001000100d15ddcb2968829" + + "5338af4b7686603fe614abd555e09efba8fb88ee09e1f7b1ccaeed2e8f823fa9eef3fdd60217fc" + + "012ea67d2479751a0b8c087a4185541b851bd8b16f8d91b840e51b1cb0ba6fe647997e57429265" + + "e85ef62d565db50a69ae1647d54d7bd855e4db3d8a91510e5bcbd0edfbbecaa20a7bd9ae74593d" + + "aa7b11b4")] +[assembly: InternalsVisibleTo("DynamicProxyGenAssembly2, PublicKey=0024000004800000940000000602000000240000525341310004000001000100c547cac37abd99c8db225ef2f6c8a3602f3b3606cc9891605d02baa56104f4cfc0734aa39b93bf7852f7d9266654753cc297e7d2edfe0bac1cdcf9f717241550e0a7b191195b7667bb4f64bcb8e2121380fd1d9d46ad2d92d2d15605093924cceaf74c4861eff62abf69b9291ed0a340e113be11e6a7d3113e92484cf7045cc7")] +[assembly: Azure.Core.AzureResourceProviderNamespace("Microsoft.Storage")] diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AvroReaderFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AvroReaderFactory.cs new file mode 100644 index 0000000000000..13ba1f019e595 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AvroReaderFactory.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System.IO; +using Azure.Storage.Internal.Avro; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + /// + /// Creates AvroReaders. Allows us to inject mock AvroReaders in + /// the Chunk unit tests. 
/// <summary>
/// Factory for <see cref="AvroReader"/> instances.  Exists so the Chunk
/// unit tests can substitute mock readers.
/// </summary>
internal class AvroReaderFactory
{
    /// <summary>
    /// Builds a reader that starts at the beginning of the Avro data.
    /// </summary>
    public virtual AvroReader BuildAvroReader(Stream dataStream)
    {
        return new AvroReader(dataStream);
    }

    /// <summary>
    /// Builds a reader positioned at a specific block offset and event
    /// index within the Avro data.
    /// </summary>
    public virtual AvroReader BuildAvroReader(
        Stream dataStream,
        Stream headStream,
        long blockOffset,
        long eventIndex)
    {
        return new AvroReader(dataStream, headStream, blockOffset, eventIndex);
    }
}
/// <summary>
/// An <see cref="AsyncPageable{T}"/> of <see cref="BlobChangeFeedEvent"/>s,
/// returned by <see cref="BlobChangeFeedClient"/>'s GetChangesAsync methods.
/// </summary>
public class BlobChangeFeedAsyncPagable : AsyncPageable<BlobChangeFeedEvent>
{
    private readonly ChangeFeedFactory _changeFeedFactory;
    private readonly BlobServiceClient _blobServiceClient;
    private readonly DateTimeOffset? _startTime;
    private readonly DateTimeOffset? _endTime;
    private readonly string _continuation;

    /// <summary>
    /// Internal constructor for a pageable bounded by optional start and
    /// end times.
    /// </summary>
    internal BlobChangeFeedAsyncPagable(
        BlobServiceClient blobServiceClient,
        DateTimeOffset? startTime = default,
        DateTimeOffset? endTime = default)
    {
        _changeFeedFactory = new ChangeFeedFactory(blobServiceClient);
        _blobServiceClient = blobServiceClient;
        _startTime = startTime;
        _endTime = endTime;
    }

    /// <summary>
    /// Internal constructor for a pageable that resumes iteration from a
    /// previously saved change feed cursor.
    /// </summary>
    internal BlobChangeFeedAsyncPagable(
        BlobServiceClient blobServiceClient,
        string continuation)
    {
        _changeFeedFactory = new ChangeFeedFactory(blobServiceClient);
        _blobServiceClient = blobServiceClient;
        _continuation = continuation;
    }

    /// <summary>
    /// Returns <see cref="BlobChangeFeedEvent"/>s as Pages.
    /// </summary>
    /// <param name="continuationToken">
    /// Not supported; must be null.  To use continuation, call
    /// <see cref="BlobChangeFeedClient.GetChangesAsync(string)"/>.
    /// </param>
    /// <param name="pageSizeHint">
    /// Page size.
    /// </param>
    /// <returns>
    /// An async sequence of <see cref="Page{T}"/> of change feed events.
    /// </returns>
    /// <exception cref="ArgumentException">
    /// Thrown when <paramref name="continuationToken"/> is non-null.
    /// </exception>
    public override async IAsyncEnumerable<Page<BlobChangeFeedEvent>> AsPages(
        string continuationToken = null,
        int? pageSizeHint = null)
    {
        if (continuationToken != null)
        {
            // Cursors are consumed through the client factory methods, not
            // through the Azure.Core paging continuation token.  (Was a $""
            // interpolated string with no placeholders; also supply the
            // offending parameter name.)
            throw new ArgumentException(
                "Continuation not supported. Use BlobChangeFeedClient.GetChangesAsync(string) instead",
                nameof(continuationToken));
        }

        ChangeFeed changeFeed = await _changeFeedFactory.BuildChangeFeed(
            async: true,
            _startTime,
            _endTime,
            _continuation)
            .ConfigureAwait(false);

        while (changeFeed.HasNext())
        {
            yield return await changeFeed.GetPage(
                async: true,
                pageSize: pageSizeHint ?? 512).ConfigureAwait(false);
        }
    }
}
+ /// + protected BlobChangeFeedClient() { } + + internal BlobChangeFeedClient(BlobServiceClient blobServiceClient) + { + _blobServiceClient = blobServiceClient; + } + + /// + /// Initializes a new instance of the + /// class. + /// + /// + /// A connection string includes the authentication information + /// required for your application to access data in an Azure Storage + /// account at runtime. + /// + /// For more information, . + /// + public BlobChangeFeedClient(string connectionString) + { + _blobServiceClient = new BlobServiceClient(connectionString); + } + + /// + /// Initializes a new instance of the + /// class. + /// + /// + /// A connection string includes the authentication information + /// required for your application to access data in an Azure Storage + /// account at runtime. + /// + /// For more information, . + /// + /// + /// Optional client options that define the transport pipeline + /// policies for authentication, retries, etc., that are applied to + /// every request. + /// + public BlobChangeFeedClient(string connectionString, BlobClientOptions options) + + { + _blobServiceClient = new BlobServiceClient(connectionString, options); + } + + /// + /// Initializes a new instance of the + /// class. + /// + /// + /// A referencing the blob service. + /// This is likely to be similar to "https://{account_name}.blob.core.windows.net". + /// + /// + /// Optional client options that define the transport pipeline + /// policies for authentication, retries, etc., that are applied to + /// every request. + /// + public BlobChangeFeedClient(Uri serviceUri, BlobClientOptions options = default) + { + _blobServiceClient = new BlobServiceClient(serviceUri, options); + } + + /// + /// Initializes a new instance of the + /// class. + /// + /// + /// A referencing the blob service. + /// This is likely to be similar to "https://{account_name}.blob.core.windows.net". + /// + /// + /// The shared key credential used to sign requests. 
+ /// + /// + /// Optional client options that define the transport pipeline + /// policies for authentication, retries, etc., that are applied to + /// every request. + /// + public BlobChangeFeedClient(Uri serviceUri, StorageSharedKeyCredential credential, BlobClientOptions options = default) + { + _blobServiceClient = new BlobServiceClient(serviceUri, credential, options); + } + + /// + /// Initializes a new instance of the + /// class. + /// + /// + /// A referencing the blob service. + /// This is likely to be similar to "https://{account_name}.blob.core.windows.net". + /// + /// + /// The token credential used to sign requests. + /// + /// + /// Optional client options that define the transport pipeline + /// policies for authentication, retries, etc., that are applied to + /// every request. + /// + public BlobChangeFeedClient(Uri serviceUri, TokenCredential credential, BlobClientOptions options = default) + { + _blobServiceClient = new BlobServiceClient(serviceUri, credential, options); + } + + /// + /// GetChanges. + /// + /// . +#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + public virtual BlobChangeFeedPagable GetChanges() +#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + { + BlobChangeFeedPagable pageable = new BlobChangeFeedPagable( + _blobServiceClient); + return pageable; + } + + /// + /// GetChanges. + /// + /// + /// . +#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. 
+ public virtual BlobChangeFeedPagable GetChanges(string continuation) +#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + { + BlobChangeFeedPagable pageable = new BlobChangeFeedPagable( + _blobServiceClient, + continuation); + return pageable; + } + + /// + /// GetChanges. + /// + /// + /// + /// . +#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + public virtual BlobChangeFeedPagable GetChanges(DateTimeOffset start = default, DateTimeOffset end = default) +#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + { + BlobChangeFeedPagable pageable = new BlobChangeFeedPagable( + _blobServiceClient, + start, + end); + return pageable; + } + + /// + /// GetChangesAsync. + /// + /// . +#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + public virtual BlobChangeFeedAsyncPagable GetChangesAsync() +#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + { + BlobChangeFeedAsyncPagable asyncPagable = new BlobChangeFeedAsyncPagable(_blobServiceClient); + return asyncPagable; + } + + /// + /// GetChangesAsync. + /// + /// + /// . +#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. 
/// <summary>
/// BlobChangeFeedExtensions.
/// </summary>
public static class BlobChangeFeedExtensions
{
    /// <summary>
    /// Creates a <see cref="BlobChangeFeedClient"/> for the account that
    /// <paramref name="serviceClient"/> targets.
    /// </summary>
    /// <param name="serviceClient">The blob service client.</param>
    /// <returns>A <see cref="BlobChangeFeedClient"/>.</returns>
    public static BlobChangeFeedClient GetChangeFeedClient(this BlobServiceClient serviceClient)
    {
        return new BlobChangeFeedClient(serviceClient);
    }

    /// <summary>
    /// Builds a DateTimeOffset from a segment path of the form
    /// "idx/segments/{year}/{month}/{day}/{hour*100}/...".  Components that
    /// are absent default to the start of the period (month/day 1, hour 0).
    /// </summary>
    /// <exception cref="ArgumentException">
    /// Thrown when the path has fewer than three components.
    /// </exception>
    internal static DateTimeOffset? ToDateTimeOffset(this string segmentPath)
    {
        if (segmentPath == null)
        {
            return default;
        }
        string[] splitPath = segmentPath.Split(new char[] { '/' }, StringSplitOptions.RemoveEmptyEntries);

        if (splitPath.Length < 3)
        {
            throw new ArgumentException($"{nameof(segmentPath)} is not a valid segment path.");
        }

        return new DateTimeOffset(
            year: int.Parse(splitPath[2], CultureInfo.InvariantCulture),
            month: splitPath.Length >= 4
                ? int.Parse(splitPath[3], CultureInfo.InvariantCulture)
                : 1,
            day: splitPath.Length >= 5
                ? int.Parse(splitPath[4], CultureInfo.InvariantCulture)
                : 1,
            // Hour directories are named hour * 100 (e.g. "1700" == 5 PM).
            hour: splitPath.Length >= 6
                ? int.Parse(splitPath[5], CultureInfo.InvariantCulture) / 100
                : 0,
            minute: 0,
            second: 0,
            offset: TimeSpan.Zero);
    }

    /// <summary>
    /// Rounds a DateTimeOffset down to the nearest hour (minutes, seconds,
    /// and sub-second components are discarded; the offset is preserved).
    /// </summary>
    internal static DateTimeOffset? RoundDownToNearestHour(this DateTimeOffset? dateTimeOffset)
    {
        if (dateTimeOffset == null)
        {
            return null;
        }

        return new DateTimeOffset(
            year: dateTimeOffset.Value.Year,
            month: dateTimeOffset.Value.Month,
            day: dateTimeOffset.Value.Day,
            hour: dateTimeOffset.Value.Hour,
            minute: 0,
            second: 0,
            offset: dateTimeOffset.Value.Offset);
    }

    /// <summary>
    /// Rounds a DateTimeOffset up to the nearest hour.  Values already on
    /// an exact hour boundary are returned unchanged, matching the
    /// documented contract that only times with minutes/seconds are rounded
    /// up.  (Previously an exact hour was bumped a full hour forward, which
    /// silently widened the requested end-time window.)
    /// </summary>
    internal static DateTimeOffset? RoundUpToNearestHour(this DateTimeOffset? dateTimeOffset)
    {
        if (dateTimeOffset == null)
        {
            return null;
        }

        DateTimeOffset? roundedDown = dateTimeOffset.RoundDownToNearestHour();

        // Already on an exact hour boundary; don't extend the range.
        if (roundedDown == dateTimeOffset)
        {
            return roundedDown;
        }

        return roundedDown.Value.AddHours(1);
    }

    /// <summary>
    /// Rounds a DateTimeOffset down to the start of its year, normalized to
    /// offset zero.
    /// </summary>
    internal static DateTimeOffset? RoundDownToNearestYear(this DateTimeOffset? dateTimeOffset)
    {
        if (dateTimeOffset == null)
        {
            return null;
        }

        return new DateTimeOffset(
            year: dateTimeOffset.Value.Year,
            month: 1,
            day: 1,
            hour: 0,
            minute: 0,
            second: 0,
            offset: TimeSpan.Zero);
    }

    /// <summary>
    /// Lists the segment blob paths under a year prefix, keeping only
    /// segments within the optional [startTime, endTime] window.
    /// </summary>
    /// <returns>A queue of segment blob names, in listing order.</returns>
    internal static async Task<Queue<string>> GetSegmentsInYear(
        bool async,
        BlobContainerClient containerClient,
        string yearPath,
        DateTimeOffset? startTime = default,
        DateTimeOffset? endTime = default)
    {
        List<string> list = new List<string>();

        if (async)
        {
            await foreach (BlobHierarchyItem blobHierarchyItem in containerClient.GetBlobsByHierarchyAsync(
                prefix: yearPath)
                .ConfigureAwait(false))
            {
                if (blobHierarchyItem.IsPrefix)
                    continue;

                DateTimeOffset segmentDateTime = blobHierarchyItem.Blob.Name.ToDateTimeOffset().Value;
                if (startTime.HasValue && segmentDateTime < startTime
                    || endTime.HasValue && segmentDateTime > endTime)
                    continue;

                list.Add(blobHierarchyItem.Blob.Name);
            }
        }
        else
        {
            foreach (BlobHierarchyItem blobHierarchyItem in containerClient.GetBlobsByHierarchy(
                prefix: yearPath))
            {
                if (blobHierarchyItem.IsPrefix)
                    continue;

                DateTimeOffset segmentDateTime = blobHierarchyItem.Blob.Name.ToDateTimeOffset().Value;
                if (startTime.HasValue && segmentDateTime < startTime
                    || endTime.HasValue && segmentDateTime > endTime)
                    continue;

                list.Add(blobHierarchyItem.Blob.Name);
            }
        }

        return new Queue<string>(list);
    }

    /// <summary>
    /// Returns the earlier of <paramref name="lastConsumable"/> and the
    /// optional <paramref name="endDate"/>.
    /// </summary>
    internal static DateTimeOffset MinDateTime(DateTimeOffset lastConsumable, DateTimeOffset? endDate)
    {
        if (endDate.HasValue && endDate.Value < lastConsumable)
        {
            return endDate.Value;
        }

        return lastConsumable;
    }
}
All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading; +using Azure.Core.Pipeline; +using Azure.Storage.Blobs; +using Azure.Storage.Blobs.ChangeFeed.Models; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + /// + /// BlobChangeFeedPagable. + /// + public class BlobChangeFeedPagable : Pageable + { + private readonly ChangeFeedFactory _changeFeedFactory; + private readonly BlobServiceClient _blobServiceClient; + private readonly DateTimeOffset? _startTime; + private readonly DateTimeOffset? _endTime; + private readonly string _continuation; + + internal BlobChangeFeedPagable( + BlobServiceClient blobServiceClient, + DateTimeOffset? startTime = default, + DateTimeOffset? endTime = default) + { + _changeFeedFactory = new ChangeFeedFactory(blobServiceClient); + _blobServiceClient = blobServiceClient; + _startTime = startTime; + _endTime = endTime; + } + + internal BlobChangeFeedPagable( + BlobServiceClient blobServiceClient, + string continuation) + { + _changeFeedFactory = new ChangeFeedFactory(blobServiceClient); + _blobServiceClient = blobServiceClient; + _continuation = continuation; + } + + /// + /// Returns s as Pages. + /// + /// + /// Throws an . To use contination, call + /// . + /// + /// + /// Page size. + /// + /// + /// . + /// + public override IEnumerable> AsPages(string continuationToken = null, int? pageSizeHint = null) + { + if (continuationToken != null) + { + throw new ArgumentException($"Continuation not supported. Use BlobChangeFeedClient.GetChanges(string) instead"); + } + + ChangeFeed changeFeed = _changeFeedFactory.BuildChangeFeed( + async: false, + _startTime, + _endTime, + _continuation) + .EnsureCompleted(); + + while (changeFeed.HasNext()) + { + yield return changeFeed.GetPage( + async: false, + pageSize: pageSizeHint ?? 
internal class ChangeFeed
{
    /// <summary>
    /// BlobContainerClient for making List Blob requests and creating Segments.
    /// </summary>
    private readonly BlobContainerClient _containerClient;

    /// <summary>
    /// A <see cref="SegmentFactory"/> for creating new <see cref="Segment"/>s.
    /// </summary>
    private readonly SegmentFactory _segmentFactory;

    /// <summary>
    /// Queue of paths to years we haven't processed yet.
    /// </summary>
    private readonly Queue<string> _years;

    /// <summary>
    /// Paths to segments in the current year we haven't processed yet.
    /// </summary>
    private Queue<string> _segments;

    /// <summary>
    /// The Segment we are currently processing.
    /// </summary>
    private Segment _currentSegment;

    /// <summary>
    /// The latest time the Change Feed can safely be read from.
    /// </summary>
    private readonly DateTimeOffset _lastConsumable;

    /// <summary>
    /// User-specified start time.  If the start time occurs before Change Feed was enabled
    /// for this account, we will start at the beginning of the Change Feed.
    /// </summary>
    private readonly DateTimeOffset? _startTime;

    /// <summary>
    /// User-specified end time.  If the end time occurs after _lastConsumable, we will
    /// end at _lastConsumable.
    /// </summary>
    private readonly DateTimeOffset? _endTime;

    /// <summary>
    /// If this Change Feed has no events.
    /// </summary>
    private bool _empty;

    public ChangeFeed(
        BlobContainerClient containerClient,
        SegmentFactory segmentFactory,
        Queue<string> years,
        Queue<string> segments,
        Segment currentSegment,
        DateTimeOffset lastConsumable,
        DateTimeOffset? startTime,
        DateTimeOffset? endTime)
    {
        _containerClient = containerClient;
        _segmentFactory = segmentFactory;
        _years = years;
        _segments = segments;
        _currentSegment = currentSegment;
        _lastConsumable = lastConsumable;
        _startTime = startTime;
        _endTime = endTime;
        _empty = false;
    }

    /// <summary>
    /// Constructor for mocking, and for creating a Change Feed with no Events.
    /// </summary>
    public ChangeFeed() { }

    /// <summary>
    /// Returns the next page of events and the cursor to resume after it.
    /// Returns an empty page when the current segment is past the end time
    /// or is not yet finalized (the last segment may still be adding chunks).
    /// </summary>
    /// <exception cref="InvalidOperationException">
    /// Thrown when the change feed has no more events.
    /// </exception>
    public async Task<Page<BlobChangeFeedEvent>> GetPage(
        bool async,
        int pageSize = Constants.ChangeFeed.DefaultPageSize,
        CancellationToken cancellationToken = default)
    {
        if (!HasNext())
        {
            throw new InvalidOperationException("Change feed doesn't have any more events");
        }

        if (_currentSegment.DateTime >= _endTime)
        {
            return BlobChangeFeedEventPage.Empty();
        }

        if (!_currentSegment.Finalized)
        {
            return BlobChangeFeedEventPage.Empty();
        }

        // Page size is capped at the default.
        if (pageSize > Constants.ChangeFeed.DefaultPageSize)
        {
            pageSize = Constants.ChangeFeed.DefaultPageSize;
        }

        // Get next page, pulling from successive segments until the page is
        // full or the feed is exhausted.
        List<BlobChangeFeedEvent> blobChangeFeedEvents = new List<BlobChangeFeedEvent>();

        int remainingEvents = pageSize;
        while (blobChangeFeedEvents.Count < pageSize
            && HasNext())
        {
            List<BlobChangeFeedEvent> newEvents = await _currentSegment.GetPage(
                async,
                remainingEvents,
                cancellationToken).ConfigureAwait(false);
            blobChangeFeedEvents.AddRange(newEvents);
            remainingEvents -= newEvents.Count;
            await AdvanceSegmentIfNecessary(async).ConfigureAwait(false);
        }

        return new BlobChangeFeedEventPage(blobChangeFeedEvents, JsonSerializer.Serialize(GetCursor()));
    }

    public bool HasNext()
    {
        // [If Change Feed is empty], or [current segment is not finalized]
        // or ([segment count is 0] and [year count is 0] and [current segment doesn't have next])
        if (_empty
            || !_currentSegment.Finalized
            || (_segments.Count == 0
                && _years.Count == 0
                && !_currentSegment.HasNext()))
        {
            return false;
        }

        if (_endTime.HasValue)
        {
            return _currentSegment.DateTime < _endTime;
        }

        return true;
    }

    public DateTimeOffset LastConsumable()
    {
        return _lastConsumable;
    }

    internal ChangeFeedCursor GetCursor()
        // NOTE(review): string.GetHashCode() is randomized per process on
        // .NET Core, so a cursor saved by one process may fail URL-hash
        // validation in another — confirm whether a stable hash is intended.
        => new ChangeFeedCursor(
            urlHash: _containerClient.Uri.ToString().GetHashCode(),
            endDateTime: _endTime,
            currentSegmentCursor: _currentSegment.GetCursor());

    /// <summary>
    /// Lists segment paths under a year prefix within the optional time
    /// window.  Delegates to the shared helper in
    /// <see cref="BlobChangeFeedExtensions"/> (previously duplicated here).
    /// </summary>
    internal async Task<Queue<string>> GetSegmentsInYear(
        bool async,
        string yearPath,
        DateTimeOffset? startTime = default,
        DateTimeOffset? endTime = default)
        => await BlobChangeFeedExtensions.GetSegmentsInYear(
            async,
            _containerClient,
            yearPath,
            startTime,
            endTime)
            .ConfigureAwait(false);

    /// <summary>
    /// Moves to the next segment (refilling the segment queue from the next
    /// year when needed) once the current segment has no more events.
    /// </summary>
    private async Task AdvanceSegmentIfNecessary(bool async)
    {
        // If the current segment has more Events, we don't need to do anything.
        if (_currentSegment.HasNext())
        {
            return;
        }

        // If the current segment is completed, remove it
        if (_segments.Count > 0)
        {
            _currentSegment = await _segmentFactory.BuildSegment(
                async,
                _segments.Dequeue()).ConfigureAwait(false);
        }

        // If _segments is empty, refill it
        else if (_segments.Count == 0 && _years.Count > 0)
        {
            string yearPath = _years.Dequeue();

            // Get Segments for first year
            _segments = await GetSegmentsInYear(
                async: async,
                yearPath: yearPath,
                startTime: _startTime,
                endTime: _endTime)
                .ConfigureAwait(false);

            if (_segments.Count > 0)
            {
                _currentSegment = await _segmentFactory.BuildSegment(
                    async,
                    _segments.Dequeue())
                    .ConfigureAwait(false);
            }
        }
    }

    /// <summary>
    /// Creates a Change Feed that reports no events.
    /// </summary>
    public static ChangeFeed Empty()
        => new ChangeFeed
        {
            _empty = true
        };
}
internal class ChangeFeedFactory
{
    private readonly SegmentFactory _segmentFactory;
    private readonly BlobContainerClient _containerClient;

    /// <summary>
    /// Builds the factory (and its Segment/Shard/Chunk factory chain) from
    /// the service client's change feed container.
    /// </summary>
    public ChangeFeedFactory(
        BlobServiceClient blobServiceClient)
    {
        _containerClient = blobServiceClient.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName);
        _segmentFactory = new SegmentFactory(
            _containerClient,
            new ShardFactory(
                _containerClient,
                new ChunkFactory(
                    _containerClient,
                    new LazyLoadingBlobStreamFactory(),
                    new AvroReaderFactory())));
    }

    /// <summary>
    /// Constructor for injecting a mock segment factory in tests.
    /// </summary>
    public ChangeFeedFactory(
        BlobContainerClient containerClient,
        SegmentFactory segmentFactory)
    {
        _containerClient = containerClient;
        _segmentFactory = segmentFactory;
    }

    public async Task<ChangeFeed> BuildChangeFeed(
        bool async,
        DateTimeOffset? startTime = default,
        DateTimeOffset? endTime = default,
        string continuation = default)
    {
        DateTimeOffset lastConsumable;
        Queue<string> years = new Queue<string>();
        Queue<string> segments = new Queue<string>();
        ChangeFeedCursor cursor = null;

        // Create cursor
        if (continuation != null)
        {
            cursor = JsonSerializer.Deserialize<ChangeFeedCursor>(continuation);
            ValidateCursor(_containerClient, cursor);
            startTime = cursor.CurrentSegmentCursor.SegmentTime;
            endTime = cursor.EndTime;
        }
        // Round start and end time if we are not using the cursor.
        else
        {
            startTime = startTime.RoundDownToNearestHour();
            endTime = endTime.RoundUpToNearestHour();
        }

        // Check if Change Feed has been enabled for this account.
        bool changeFeedContainerExists;

        if (async)
        {
            changeFeedContainerExists = await _containerClient.ExistsAsync().ConfigureAwait(false);
        }
        else
        {
            changeFeedContainerExists = _containerClient.Exists();
        }

        if (!changeFeedContainerExists)
        {
            throw new ArgumentException("Change Feed hasn't been enabled on this account, or is currently being enabled.");
        }

        // Get last consumable
        BlobClient blobClient = _containerClient.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath);
        BlobDownloadInfo blobDownloadInfo;
        if (async)
        {
            blobDownloadInfo = await blobClient.DownloadAsync().ConfigureAwait(false);
        }
        else
        {
            blobDownloadInfo = blobClient.Download();
        }

        // Dispose both the download (which owns the network stream) and the
        // JsonDocument (which rents pooled memory); neither was disposed before.
        using (blobDownloadInfo)
        {
            JsonDocument jsonMetaSegment;
            if (async)
            {
                jsonMetaSegment = await JsonDocument.ParseAsync(blobDownloadInfo.Content).ConfigureAwait(false);
            }
            else
            {
                jsonMetaSegment = JsonDocument.Parse(blobDownloadInfo.Content);
            }

            using (jsonMetaSegment)
            {
                lastConsumable = jsonMetaSegment.RootElement.GetProperty("lastConsumable").GetDateTimeOffset();
            }
        }

        // Get year paths
        years = await GetYearPaths(async).ConfigureAwait(false);

        // Dequeue any years that occur before start time
        if (startTime.HasValue)
        {
            while (years.Count > 0
                && years.Peek().ToDateTimeOffset() < startTime.RoundDownToNearestYear())
            {
                years.Dequeue();
            }
        }

        // There are no years.
        if (years.Count == 0)
        {
            return ChangeFeed.Empty();
        }

        while (segments.Count == 0 && years.Count > 0)
        {
            // Get Segments for year
            segments = await BlobChangeFeedExtensions.GetSegmentsInYear(
                async: async,
                containerClient: _containerClient,
                yearPath: years.Dequeue(),
                startTime: startTime,
                endTime: BlobChangeFeedExtensions.MinDateTime(lastConsumable, endTime))
                .ConfigureAwait(false);
        }

        // We were on the last year, and there were no more segments.
        if (segments.Count == 0)
        {
            return ChangeFeed.Empty();
        }

        Segment currentSegment = await _segmentFactory.BuildSegment(
            async,
            segments.Dequeue(),
            cursor?.CurrentSegmentCursor)
            .ConfigureAwait(false);

        return new ChangeFeed(
            _containerClient,
            _segmentFactory,
            years,
            segments,
            currentSegment,
            lastConsumable,
            startTime,
            endTime);
    }

    private static void ValidateCursor(
        BlobContainerClient containerClient,
        ChangeFeedCursor cursor)
    {
        // NOTE(review): string.GetHashCode() is randomized per process on
        // .NET Core, so a cursor saved by one process may fail this check in
        // another process — confirm whether a stable hash is intended here.
        if (containerClient.Uri.ToString().GetHashCode() != cursor.UrlHash)
        {
            throw new ArgumentException("Cursor URL does not match container URL");
        }
    }

    /// <summary>
    /// Lists the year prefixes ("idx/segments/{year}/") in the change feed
    /// container, skipping the initialization segment.
    /// </summary>
    internal async Task<Queue<string>> GetYearPaths(
        bool async)
    {
        List<string> list = new List<string>();

        if (async)
        {
            await foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchyAsync(
                prefix: Constants.ChangeFeed.SegmentPrefix,
                delimiter: "/").ConfigureAwait(false))
            {
                if (blobHierarchyItem.Prefix.Contains(Constants.ChangeFeed.InitalizationSegment))
                    continue;

                list.Add(blobHierarchyItem.Prefix);
            }
        }
        else
        {
            foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchy(
                prefix: Constants.ChangeFeed.SegmentPrefix,
                delimiter: "/"))
            {
                if (blobHierarchyItem.Prefix.Contains(Constants.ChangeFeed.InitalizationSegment))
                    continue;

                list.Add(blobHierarchyItem.Prefix);
            }
        }
        return new Queue<string>(list);
    }
}
+
+using System;
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+using Azure.Storage.Internal.Avro;
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+    ///
+    /// Chunk.
+    ///
+    internal class Chunk
+    {
+        ///
+        /// Avro Reader to parse the Events.
+        ///
+        private readonly AvroReader _avroReader;
+
+        ///
+        /// The byte offset of the beginning of the current
+        /// Block.
+        ///
+        public virtual long BlockOffset { get; private set; }
+
+        ///
+        /// The index of the Event within the current block.
+        ///
+        public virtual long EventIndex { get; private set; }
+
+        public Chunk(
+            AvroReader avroReader,
+            long blockOffset,
+            long eventIndex)
+        {
+            _avroReader = avroReader;
+            BlockOffset = blockOffset;
+            EventIndex = eventIndex;
+        }
+
+        public virtual bool HasNext()
+            => _avroReader.HasNext();
+
+        public virtual async Task Next(
+            bool async,
+            CancellationToken cancellationToken = default)
+        {
+            Dictionary result;
+
+            if (!HasNext())
+            {
+                return null;
+            }
+
+            result = (Dictionary)await _avroReader.Next(async, cancellationToken).ConfigureAwait(false);
+            BlockOffset = _avroReader.BlockOffset;
+            EventIndex = _avroReader.ObjectIndex;
+            return new BlobChangeFeedEvent(result);
+        }
+
+        ///
+        /// Constructor for mocking. Do not use.
+        ///
+        internal Chunk() { }
+    }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs
new file mode 100644
index 0000000000000..b400045f1a171
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs
@@ -0,0 +1,70 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+ +using System.IO; +using Azure.Storage.Internal.Avro; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + internal class ChunkFactory + { + private readonly LazyLoadingBlobStreamFactory _lazyLoadingBlobStreamFactory; + private readonly AvroReaderFactory _avroReaderFactory; + private readonly BlobContainerClient _containerClient; + + public ChunkFactory( + BlobContainerClient containerClient, + LazyLoadingBlobStreamFactory lazyLoadingBlobStreamFactory, + AvroReaderFactory avroReaderFactory) + { + _containerClient = containerClient; + _lazyLoadingBlobStreamFactory = lazyLoadingBlobStreamFactory; + _avroReaderFactory = avroReaderFactory; + } + + public virtual Chunk BuildChunk( + string chunkPath, + long? blockOffset = default, + long? eventIndex = default) + { + BlobClient blobClient = _containerClient.GetBlobClient(chunkPath); + blockOffset ??= 0; + eventIndex ??= 0; + AvroReader avroReader; + + Stream dataStream = _lazyLoadingBlobStreamFactory.BuildLazyLoadingBlobStream( + blobClient, + offset: blockOffset.Value, + blockSize: Constants.ChangeFeed.ChunkBlockDownloadSize); + + // We aren't starting from the beginning of the Chunk + if (blockOffset != 0) + { + Stream headStream = _lazyLoadingBlobStreamFactory.BuildLazyLoadingBlobStream( + blobClient, + offset: 0, + blockSize: 3 * Constants.KB); + + avroReader = _avroReaderFactory.BuildAvroReader( + dataStream, + headStream, + blockOffset.Value, + eventIndex.Value); + } + else + { + avroReader = _avroReaderFactory.BuildAvroReader(dataStream); + } + + return new Chunk( + avroReader, + blockOffset.Value, + eventIndex.Value); + } + + /// + /// Constructor for mocking. 
+ /// + public ChunkFactory() { } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs new file mode 100644 index 0000000000000..9d2ac838acd1b --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs @@ -0,0 +1,246 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core.Pipeline; +using Azure.Storage.Blobs.Models; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + internal class LazyLoadingBlobStream : Stream + { + /// + /// BlobClient to make download calls with. + /// + private readonly BlobClient _blobClient; + + /// + /// The offset within the blob of the next block we will download. + /// + private long _offset; + + /// + /// The number of bytes we'll download with each download call. + /// + private readonly long _blockSize; + + /// + /// Underlying Stream. + /// + private Stream _stream; + + /// + /// If this LazyLoadingBlobStream has been initalized. + /// + private bool _initalized; + + /// + /// The number of bytes in the last download call. + /// + private long _lastDownloadBytes; + + /// + /// The current length of the blob. + /// + private long _blobLength; + + public LazyLoadingBlobStream(BlobClient blobClient, long offset, long blockSize) + { + _blobClient = blobClient; + _offset = offset; + _blockSize = blockSize; + _initalized = false; + } + + /// + /// Constructor for mocking. 
+ /// + public LazyLoadingBlobStream() { } + + /// + public override int Read( + byte[] buffer, + int offset, + int count) + => ReadInternal( + async: false, + buffer, + offset, + count).EnsureCompleted(); + + /// + public override async Task ReadAsync( + byte[] buffer, + int offset, + int count, + CancellationToken cancellationToken) + => await ReadInternal( + async: true, + buffer, + offset, + count, + cancellationToken).ConfigureAwait(false); + + /// + /// Initalizes this LazyLoadingBlobStream. + /// The number of bytes that were downloaded in the first download request. + /// + private async Task Initalize(bool async, CancellationToken cancellationToken) + { + await DownloadBlock(async, cancellationToken).ConfigureAwait(false); + _initalized = true; + } + + /// + /// Downloads the next block. + /// Number of bytes that were downloaded + /// + private async Task DownloadBlock(bool async, CancellationToken cancellationToken) + { + Response response; + HttpRange range = new HttpRange(_offset, _blockSize); + + response = async + ? await _blobClient.DownloadAsync(range, cancellationToken: cancellationToken).ConfigureAwait(false) + : _blobClient.Download(range); + _stream = response.Value.Content; + _offset += response.Value.ContentLength; + _lastDownloadBytes = response.Value.ContentLength; + _blobLength = GetBlobLength(response); + } + + /// + /// Shared sync and async Read implementation. + /// + private async Task ReadInternal( + bool async, + byte[] buffer, + int offset, + int count, + CancellationToken cancellationToken = default) + { + ValidateReadParameters(buffer, offset, count); + + if (!_initalized) + { + await Initalize(async, cancellationToken: cancellationToken).ConfigureAwait(false); + if (_lastDownloadBytes == 0) + { + return 0; + } + } + + int totalCopiedBytes = 0; + do + { + int copiedBytes = async + ? 
await _stream.ReadAsync(buffer, offset, count).ConfigureAwait(false) + : _stream.Read(buffer, offset, count); + offset += copiedBytes; + count -= copiedBytes; + totalCopiedBytes += copiedBytes; + + // We've run out of bytes in the current block. + if (copiedBytes == 0) + { + // We hit the end of the blob with the last download call. + if (_offset == _blobLength) + { + return totalCopiedBytes; + } + + // Download the next block + else + { + await DownloadBlock(async, cancellationToken).ConfigureAwait(false); + } + } + } + while (count > 0); + return totalCopiedBytes; + } + + private static void ValidateReadParameters(byte[] buffer, int offset, int count) + { + if (buffer == null) + { + throw new ArgumentNullException($"{nameof(buffer)}", $"{nameof(buffer)} cannot be null."); + } + + if (offset < 0) + { + throw new ArgumentOutOfRangeException($"{nameof(offset)} cannot be less than 0."); + } + + if (offset > buffer.Length) + { + throw new ArgumentOutOfRangeException($"{nameof(offset)} cannot exceed {nameof(buffer)} length."); + } + + if (count < 0) + { + throw new ArgumentOutOfRangeException($"{nameof(count)} cannot be less than 0."); + } + + if (offset + count > buffer.Length) + { + throw new ArgumentOutOfRangeException($"{nameof(offset)} + {nameof(count)} cannot exceed {nameof(buffer)} length."); + } + } + + private static long GetBlobLength(Response response) + { + string lengthString = response.Value.Details.ContentRange; + string[] split = lengthString.Split('/'); + return Convert.ToInt64(split[1], CultureInfo.InvariantCulture); + } + + /// + public override bool CanRead => true; + + /// + public override bool CanSeek => false; + + /// + public override bool CanWrite => throw new NotSupportedException(); + + public override long Length => throw new NotSupportedException(); + + /// + public override long Position { + get => _stream.Position; + set => throw new NotSupportedException(); + } + + /// + public override void Flush() + { + } + + /// + public override 
long Seek(long offset, SeekOrigin origin) + { + throw new NotSupportedException(); + } + + /// + public override void SetLength(long value) + { + throw new NotSupportedException(); + } + + /// + public override void Write(byte[] buffer, int offset, int count) + { + throw new NotSupportedException(); + } + + protected override void Dispose(bool disposing) => _stream.Dispose(); + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStreamFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStreamFactory.cs new file mode 100644 index 0000000000000..73fd0a94b896e --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStreamFactory.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +namespace Azure.Storage.Blobs.ChangeFeed +{ + /// + /// Creates LazyLoadingBlobStreams. Allows us to inject mock + /// LazyLoadingBlobStreams in the Chunk unit tests. + /// + internal class LazyLoadingBlobStreamFactory + { + public virtual LazyLoadingBlobStream BuildLazyLoadingBlobStream( + BlobClient blobClient, + long offset, + long blockSize) + => new LazyLoadingBlobStream( + blobClient, + offset, + blockSize); + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs new file mode 100644 index 0000000000000..ea0f595e30258 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Text; + +namespace Azure.Storage.Blobs.ChangeFeed.Models +{ + /// + /// BlobChangeFeedEvent. + /// + public class BlobChangeFeedEvent + { + /// + /// Internal constructor. 
+ /// + internal BlobChangeFeedEvent(Dictionary record) + { + Topic = (string)record[Constants.ChangeFeed.Event.Topic]; + Subject = (string)record[Constants.ChangeFeed.Event.Subject]; + EventType = ToBlobChangeFeedEventType((string)record[Constants.ChangeFeed.Event.EventType]); + EventTime = DateTimeOffset.Parse((string)record[Constants.ChangeFeed.Event.EventTime], CultureInfo.InvariantCulture); + Id = Guid.Parse((string)record[Constants.ChangeFeed.Event.EventId]); + EventData = new BlobChangeFeedEventData((Dictionary)record[Constants.ChangeFeed.Event.Data]); + record.TryGetValue(Constants.ChangeFeed.Event.DataVersion, out object dataVersion); + DataVersion = (long?)dataVersion; + record.TryGetValue(Constants.ChangeFeed.Event.MetadataVersion, out object metadataVersion); + MetadataVersion = (string)metadataVersion; + } + + internal BlobChangeFeedEvent() { } + + /// + /// Full resource path to the event source. This field is not writeable. Event Grid provides this value. + /// + public string Topic { get; internal set; } + + /// + /// Publisher-defined path to the event subject. + /// + public string Subject { get; internal set; } + + /// + /// One of the registered event types for this event source. + /// + public BlobChangeFeedEventType EventType { get; internal set; } + + /// + /// The time the event is generated based on the provider's UTC time. + /// + public DateTimeOffset EventTime { get; internal set; } + + /// + /// Unique identifier for the event. + /// + public Guid Id { get; internal set; } + + /// + /// Blob storage event data. + /// + public BlobChangeFeedEventData EventData { get; internal set; } + + /// + /// The schema version of the data object. The publisher defines the schema version. + /// + public long? DataVersion { get; internal set; } + + /// + /// The schema version of the event metadata. Event Grid defines the schema of the top-level properties. + /// Event Grid provides this value. 
+ /// + public string MetadataVersion { get; internal set; } + + /// + public override string ToString() => $"{EventTime}: {EventType} {Subject} ({EventData?.ToString() ?? "Unknown Event"})"; + + private static BlobChangeFeedEventType ToBlobChangeFeedEventType(string s) + { + switch (s) + { + case "BlobCreated": + return BlobChangeFeedEventType.BlobCreated; + case "BlobDeleted": + return BlobChangeFeedEventType.BlobDeleted; + default: + return default; + } + } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs new file mode 100644 index 0000000000000..8edb9b1a2d4d0 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs @@ -0,0 +1,129 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using Azure.Storage.Blobs.Models; + +namespace Azure.Storage.Blobs.ChangeFeed.Models +{ + /// + /// BlobChangeFeedEventData. + /// + public class BlobChangeFeedEventData + { + /// + /// Internal constructor. 
+ /// + internal BlobChangeFeedEventData() { } + + internal BlobChangeFeedEventData(Dictionary record) + { + Api = ((string)record[Constants.ChangeFeed.EventData.Api]); + ClientRequestId = Guid.Parse((string)record[Constants.ChangeFeed.EventData.ClientRequestId]); + RequestId = Guid.Parse((string)record[Constants.ChangeFeed.EventData.RequestId]); + ETag = new ETag((string)record[Constants.ChangeFeed.EventData.Etag]); + ContentType = (string)record[Constants.ChangeFeed.EventData.ContentType]; + ContentLength = (long)record[Constants.ChangeFeed.EventData.ContentLength]; + BlobType = ((string)record[Constants.ChangeFeed.EventData.BlobType]) switch + { + Constants.ChangeFeed.EventData.BlockBlob => BlobType.Block, + Constants.ChangeFeed.EventData.PageBlob => BlobType.Page, + Constants.ChangeFeed.EventData.AppendBlob => BlobType.Append, + _ => default + }; + record.TryGetValue(Constants.ChangeFeed.EventData.ContentOffset, out object contentOffset); + ContentOffset = (long?)contentOffset; + record.TryGetValue(Constants.ChangeFeed.EventData.DestinationUrl, out object destinationUrl); + DestinationUri = !string.IsNullOrEmpty((string)destinationUrl) ? new Uri((string)destinationUrl) : null; + record.TryGetValue(Constants.ChangeFeed.EventData.SourceUrl, out object sourceUrl); + SourceUri = !string.IsNullOrEmpty((string)sourceUrl) ? new Uri((string)sourceUrl) : null; + record.TryGetValue(Constants.ChangeFeed.EventData.Url, out object url); + Uri = !string.IsNullOrEmpty((string)url) ? new Uri((string)url) : null; + record.TryGetValue(Constants.ChangeFeed.EventData.Recursive, out object recursive); + Recursive = (bool?)recursive; + Sequencer = (string)record[Constants.ChangeFeed.EventData.Sequencer]; + } + + /// + /// The operation that triggered the event. + /// + public string Api { get; internal set; } + + /// + /// A client-provided request id for the storage API operation. 
This id can be used to correlate to Azure Storage + /// diagnostic logs using the "client-request-id" field in the logs, and can be provided in client requests using + /// the "x-ms-client-request-id" header. + /// + public Guid ClientRequestId { get; internal set; } + + /// + /// Service-generated request id for the storage API operation. Can be used to correlate to Azure Storage diagnostic + /// logs using the "request-id-header" field in the logs and is returned from initiating API call in the + /// 'x-ms-request-id' header. + /// + public Guid RequestId { get; internal set; } + + /// + /// The value that you can use to perform operations conditionally. + /// + public ETag ETag { get; internal set; } + + /// + /// The content type specified for the blob. + /// + public string ContentType { get; internal set; } + + /// + /// The size of the blob in bytes. + /// + public long ContentLength { get; internal set; } + + /// + /// The type of blob. Valid values are either BlockBlob or PageBlob. + /// + public BlobType BlobType { get; internal set; } + + /// + /// The offset in bytes of a write operation taken at the point where the event-triggering application completed + /// writing to the file. + /// Appears only for events triggered on blob storage accounts that have a hierarchical namespace. + /// + public long? ContentOffset { get; internal set; } + + /// + /// The url of the file that will exist after the operation completes. For example, if a file is renamed, + /// the destinationUrl property contains the url of the new file name. + /// Appears only for events triggered on blob storage accounts that have a hierarchical namespace. + /// + public Uri DestinationUri { get; internal set; } + + /// + /// The url of the file that exists prior to the operation. For example, if a file is renamed, the sourceUrl + /// contains the url of the original file name prior to the rename operation. 
+ /// Appears only for events triggered on blob storage accounts that have a hierarchical namespace. + /// + public Uri SourceUri { get; internal set; } + + /// + /// The path to the blob. + /// If the client uses a Blob REST API, then the url has this structure: + /// (storage-account-name).blob.core.windows.net/(container-name)/(file-name) + /// If the client uses a Data Lake Storage REST API, then the url has this structure: + /// (storage-account-name).dfs.core.windows.net/(file-system-name)/(file-name). + /// + public Uri Uri { get; internal set; } + + /// + /// True to perform the operation on all child directories; otherwise False. + /// Appears only for events triggered on blob storage accounts that have a hierarchical namespace. + /// + public bool? Recursive { get; internal set; } + + /// + /// An opaque string value representing the logical sequence of events for any particular blob name. + /// Users can use standard string comparison to understand the relative sequence of two events on the same blob name. + /// + public string Sequencer { get; internal set; } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs new file mode 100644 index 0000000000000..25077943c44ba --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +using System; +using System.Collections.Generic; +using System.Text; + +namespace Azure.Storage.Blobs.ChangeFeed.Models +{ + internal class BlobChangeFeedEventPage : Page + { + public override IReadOnlyList Values { get; } + public override string ContinuationToken { get; } + public override Response GetRawResponse() => null; + //private Response _raw; + + public BlobChangeFeedEventPage() { } + + public BlobChangeFeedEventPage(List events, string continuationToken) + { + Values = events; + ContinuationToken = continuationToken; + } + + public static BlobChangeFeedEventPage Empty() + => new BlobChangeFeedEventPage( + new List(), + null); + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventType.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventType.cs new file mode 100644 index 0000000000000..c14fdef71666c --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventType.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +namespace Azure.Storage.Blobs.ChangeFeed.Models +{ + /// + /// BlobChangeFeedEventType. + /// + public enum BlobChangeFeedEventType + { + /// + /// Blob created. + /// + BlobCreated = 0, + + /// + /// Blob deleted. + /// + BlobDeleted = 1, + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs new file mode 100644 index 0000000000000..fd571354030ee --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +using System; +using Azure.Storage.Blobs.Models; + +namespace Azure.Storage.Blobs.ChangeFeed.Models +{ + /// + /// BlobChangeFeedModelFactory for building mock objects. + /// + public static class BlobChangeFeedModelFactory + { + /// + /// Creates a new BlobChangeFeedEvent instance for mocking. + /// + public static BlobChangeFeedEvent BlobChangeFeedEvent( + string topic, + string subject, + BlobChangeFeedEventType eventType, + DateTimeOffset eventTime, + Guid id, + BlobChangeFeedEventData eventData, + long dataVersion, + string metadataVersion) + => new BlobChangeFeedEvent + { + Topic = topic, + Subject = subject, + EventType = eventType, + EventTime = eventTime, + Id = id, + EventData = eventData, + DataVersion = dataVersion, + MetadataVersion = metadataVersion + }; + + /// + /// Creates a new BlobChangeFeedEventData instance for mocking. + /// + public static BlobChangeFeedEventData BlobChangeFeedEventData( + string api, + Guid clientRequestId, + Guid requestId, + ETag eTag, + string contentType, + long contentLength, + BlobType blobType, + long contentOffset, + Uri destinationUri, + Uri sourceUri, + Uri uri, + bool recursive, + string sequencer) + => new BlobChangeFeedEventData + { + Api = api, + ClientRequestId = clientRequestId, + RequestId = requestId, + ETag = eTag, + ContentType = contentType, + ContentLength = contentLength, + BlobType = blobType, + ContentOffset = contentOffset, + DestinationUri = destinationUri, + SourceUri = sourceUri, + Uri = uri, + Recursive = recursive, + Sequencer = sequencer + }; + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ChangeFeedCursor.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ChangeFeedCursor.cs new file mode 100644 index 0000000000000..8036b2427bb36 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ChangeFeedCursor.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +using System; +using System.Collections.Generic; +using System.Text; + +namespace Azure.Storage.Blobs.ChangeFeed.Models +{ + /// + /// BlobChangeFeedCursor. + /// + internal class ChangeFeedCursor + { + /// + /// CursorVersion. + /// + public int CursorVersion { get; set; } + + /// + /// UrlHash. + /// + public long UrlHash { get; set; } + + /// + /// EndDateTime. + /// + public DateTimeOffset? EndTime { get; set; } + + /// + /// The Segment Cursor for the current segment. + /// + public SegmentCursor CurrentSegmentCursor { get; set; } + + internal ChangeFeedCursor( + long urlHash, + DateTimeOffset? endDateTime, + SegmentCursor currentSegmentCursor) + { + CursorVersion = 1; + UrlHash = urlHash; + EndTime = endDateTime; + CurrentSegmentCursor = currentSegmentCursor; + } + + public ChangeFeedCursor() { } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/SegmentCursor.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/SegmentCursor.cs new file mode 100644 index 0000000000000..a732ce776dfe1 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/SegmentCursor.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; + +namespace Azure.Storage.Blobs.ChangeFeed.Models +{ + /// + /// Segment Cursor. + /// + internal class SegmentCursor + { + /// + /// Shard Cursors. + /// + public List ShardCursors { get; set; } + + /// + /// Index of the current Shard. + /// + public int ShardIndex { get; set; } + + /// + /// The DateTimeOffset of the Segment. 
+ /// + public DateTimeOffset SegmentTime { get; set; } + + internal SegmentCursor( + DateTimeOffset segmentDateTime, + List shardCursors, + int shardIndex) + { + SegmentTime = segmentDateTime; + ShardCursors = shardCursors; + ShardIndex = shardIndex; + } + + public SegmentCursor() { } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ShardCursor.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ShardCursor.cs new file mode 100644 index 0000000000000..b7ff67c17ec21 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ShardCursor.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; + +namespace Azure.Storage.Blobs.ChangeFeed.Models +{ + internal class ShardCursor + { + /// + /// Index of the current Chunk. + /// + public long ChunkIndex { get; set; } + + /// + /// The byte offset of the beginning of + /// the current Avro block. + /// + public long BlockOffset { get; set; } + + /// + /// The index of the current event within + /// the current Avro block. + /// + public long EventIndex { get; set; } + + internal ShardCursor( + long chunkIndex, + long blockOffset, + long eventIndex) + { + ChunkIndex = chunkIndex; + BlockOffset = blockOffset; + EventIndex = eventIndex; + } + + /// + /// + /// + public ShardCursor() { } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs new file mode 100644 index 0000000000000..a8fa1955e904e --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs @@ -0,0 +1,131 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Azure.Storage.Blobs.ChangeFeed.Models; +using Azure.Storage.Blobs.Models; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + internal class Segment + { + /// + /// If this Segment is finalized. + /// + public virtual bool Finalized { get; private set; } + + /// + /// The time (to the nearest hour) associated with this Segment. + /// + public DateTimeOffset DateTime { get; private set; } + + /// + /// The Shards associated with this Segment. + /// + private readonly List _shards; + + /// + /// The Shards we have finished reading from. + /// + private readonly HashSet _finishedShards; + + /// + /// The index of the Shard we will return the next event from. + /// + private int _shardIndex; + + public Segment( + List shards, + int shardIndex, + DateTimeOffset dateTime, + bool finalized) + { + _shards = shards; + _shardIndex = shardIndex; + DateTime = dateTime; + Finalized = finalized; + _finishedShards = new HashSet(); + } + + public virtual SegmentCursor GetCursor() + { + List shardCursors = new List(); + foreach (Shard shard in _shards) + { + shardCursors.Add(shard.GetCursor()); + } + return new SegmentCursor( + segmentDateTime: DateTime, + shardCursors: shardCursors, + shardIndex: _shardIndex); + } + + public virtual async Task> GetPage( + bool async, + int? pageSize, + CancellationToken cancellationToken = default) + { + List changeFeedEventList = new List(); + + if (!HasNext()) + { + throw new InvalidOperationException("Segment doesn't have any more events"); + } + + int i = 0; + while (i < pageSize && _shards.Count > 0) + { + // If this Shard is finished, skip it. 
+ if (_finishedShards.Contains(_shardIndex)) + { + _shardIndex++; + + if (_shardIndex == _shards.Count) + { + _shardIndex = 0; + } + + continue; + } + + Shard currentShard = _shards[_shardIndex]; + + BlobChangeFeedEvent changeFeedEvent = await currentShard.Next(async, cancellationToken).ConfigureAwait(false); + + changeFeedEventList.Add(changeFeedEvent); + + // If the current shard is completed, remove it from _shards + if (!currentShard.HasNext()) + { + _finishedShards.Add(_shardIndex); + } + + i++; + _shardIndex++; + if (_shardIndex >= _shards.Count) + { + _shardIndex = 0; + } + + // If all the Shards are finished, we need to break out early. + if (_finishedShards.Count == _shards.Count) + { + break; + } + } + + return changeFeedEventList; + } + + public virtual bool HasNext() + => _finishedShards.Count < _shards.Count; + + /// + /// Constructor for mocking. + /// + public Segment() { } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs new file mode 100644 index 0000000000000..11849d500c196 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Storage.Blobs.ChangeFeed.Models; +using Azure.Storage.Blobs.Models; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + internal class SegmentFactory + { + private readonly BlobContainerClient _containerClient; + private readonly ShardFactory _shardFactory; + + /// + /// Constructor for mocking. 
+        ///
+        public SegmentFactory() { }
+
+        public SegmentFactory(
+            BlobContainerClient containerClient,
+            ShardFactory shardFactory)
+        {
+            _containerClient = containerClient;
+            _shardFactory = shardFactory;
+        }
+
+#pragma warning disable CA1822 // Does not access instance data; can be marked static.
+        public virtual async Task BuildSegment(
+#pragma warning restore CA1822 // Can't mock static methods in MOQ.
+            bool async,
+            string manifestPath,
+            SegmentCursor cursor = default)
+        {
+            // Models we need for later
+            List shards = new List();
+            DateTimeOffset dateTime = manifestPath.ToDateTimeOffset().Value;
+            int shardIndex = cursor?.ShardIndex ?? 0;
+
+            // Download segment manifest
+            BlobClient blobClient = _containerClient.GetBlobClient(manifestPath);
+            BlobDownloadInfo blobDownloadInfo;
+
+            if (async)
+            {
+                blobDownloadInfo = await blobClient.DownloadAsync().ConfigureAwait(false);
+            }
+            else
+            {
+                blobDownloadInfo = blobClient.Download();
+            }
+
+            // Parse segment manifest
+            JsonDocument jsonManifest;
+
+            if (async)
+            {
+                jsonManifest = await JsonDocument.ParseAsync(blobDownloadInfo.Content).ConfigureAwait(false);
+            }
+            else
+            {
+                jsonManifest = JsonDocument.Parse(blobDownloadInfo.Content);
+            }
+
+            // Initialize Finalized field
+            string statusString = jsonManifest.RootElement.GetProperty("status").GetString();
+            bool finalized = statusString == "Finalized";
+
+            int i = 0;
+            foreach (JsonElement shardJsonElement in jsonManifest.RootElement.GetProperty("chunkFilePaths").EnumerateArray())
+            {
+                string shardPath = shardJsonElement.ToString().Substring("$blobchangefeed/".Length);
+                Shard shard = await _shardFactory.BuildShard(
+                    async,
+                    shardPath,
+                    cursor?.ShardCursors?[i])
+                    .ConfigureAwait(false);
+
+                shards.Add(shard);
+                i++;
+            }
+
+            return new Segment(
+                shards,
+                shardIndex,
+                dateTime,
+                finalized);
+        }
+    }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs
new file mode 100644
index
// Shard.cs
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Azure.Storage.Blobs.Models;
using Azure.Storage.Blobs.ChangeFeed.Models;
using System.Threading;

namespace Azure.Storage.Blobs.ChangeFeed
{
    /// <summary>
    /// Iterates the events of a single change feed shard, chunk by chunk.
    /// </summary>
    internal class Shard
    {
        /// <summary>
        /// Container Client for listing Chunks.
        /// </summary>
        private readonly BlobContainerClient _containerClient;

        /// <summary>
        /// ChunkFactory.
        /// </summary>
        private readonly ChunkFactory _chunkFactory;

        /// <summary>
        /// Queue of the paths to Chunks we haven't processed.
        /// </summary>
        private readonly Queue<string> _chunks;

        /// <summary>
        /// The Chunk we are currently processing.
        /// </summary>
        private Chunk _currentChunk;

        /// <summary>
        /// The index of the Chunk we are processing.
        /// </summary>
        private long _chunkIndex;

        /// <summary>
        /// Gets the <see cref="ShardCursor"/> for this Shard.
        /// </summary>
        public virtual ShardCursor GetCursor()
            => new ShardCursor(
                _chunkIndex,
                _currentChunk.BlockOffset,
                _currentChunk.EventIndex);

        /// <summary>
        /// If this Shard has a next event.
        /// </summary>
        public virtual bool HasNext()
            => _chunks.Count > 0 || _currentChunk.HasNext();

        /// <summary>
        /// Gets the next <see cref="BlobChangeFeedEvent"/>.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// Thrown when the Shard has no more events.
        /// </exception>
        public virtual async Task<BlobChangeFeedEvent> Next(
            bool async,
            CancellationToken cancellationToken = default)
        {
            if (!HasNext())
            {
                throw new InvalidOperationException("Shard doesn't have any more events");
            }

            BlobChangeFeedEvent changeFeedEvent =
                await _currentChunk.Next(async, cancellationToken).ConfigureAwait(false);

            // Advance to the next Chunk once the current one is exhausted.
            if (!_currentChunk.HasNext() && _chunks.Count > 0)
            {
                _currentChunk = _chunkFactory.BuildChunk(
                    _chunks.Dequeue());
                _chunkIndex++;
            }
            return changeFeedEvent;
        }

        /// <summary>
        /// Constructor for use by <see cref="ShardFactory.BuildShard"/>.
        /// </summary>
        public Shard(
            BlobContainerClient containerClient,
            ChunkFactory chunkFactory,
            Queue<string> chunks,
            Chunk currentChunk,
            long chunkIndex)
        {
            _containerClient = containerClient;
            _chunkFactory = chunkFactory;
            _chunks = chunks;
            _currentChunk = currentChunk;
            _chunkIndex = chunkIndex;
        }

        /// <summary>
        /// Constructor for mocking.
        /// </summary>
        internal Shard() { }
    }
}

// ShardFactory.cs
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System.Collections.Generic;
using System.Threading.Tasks;
using Azure.Storage.Blobs.ChangeFeed.Models;
using Azure.Storage.Blobs.Models;

namespace Azure.Storage.Blobs.ChangeFeed
{
    /// <summary>
    /// Builds a Shard.
    /// </summary>
    internal class ShardFactory
    {
        private readonly ChunkFactory _chunkFactory;
        private readonly BlobContainerClient _containerClient;

        public ShardFactory(
            BlobContainerClient containerClient,
            ChunkFactory chunkFactory)
        {
            _containerClient = containerClient;
            _chunkFactory = chunkFactory;
        }

        /// <summary>
        /// Constructor for mocking.
        /// </summary>
        public ShardFactory() { }

#pragma warning disable CA1822 // Does not access instance data, could be marked static.
        public virtual async Task<Shard> BuildShard(
#pragma warning restore CA1822 // Can't mock static methods in MOQ.
            bool async,
            string shardPath,
            ShardCursor shardCursor = default)
        {
            // Models we'll need later
            Queue<string> chunks = new Queue<string>();
            long chunkIndex = shardCursor?.ChunkIndex ?? 0;
            long blockOffset = shardCursor?.BlockOffset ?? 0;
            long eventIndex = shardCursor?.EventIndex ?? 0;

            // List the chunk blobs under this shard's prefix, skipping
            // virtual-directory placeholders.
            if (async)
            {
                await foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchyAsync(
                    prefix: shardPath).ConfigureAwait(false))
                {
                    if (blobHierarchyItem.IsPrefix)
                        continue;

                    chunks.Enqueue(blobHierarchyItem.Blob.Name);
                }
            }
            else
            {
                foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchy(
                    prefix: shardPath))
                {
                    if (blobHierarchyItem.IsPrefix)
                        continue;

                    chunks.Enqueue(blobHierarchyItem.Blob.Name);
                }
            }

            // Fast forward to the Chunk the cursor points at.  (The loop body
            // already doesn't run when chunkIndex == 0, so the original
            // `if (chunkIndex > 0)` wrapper was redundant and has been removed.)
            for (long i = 0; i < chunkIndex; i++)
            {
                chunks.Dequeue();
            }

            // NOTE(review): assumes at least one chunk blob remains after the
            // fast-forward; Queue<T>.Dequeue() throws InvalidOperationException
            // on an empty queue.  Confirm every shard listed in a segment
            // manifest always has chunk blobs.
            Chunk currentChunk = _chunkFactory.BuildChunk(
                chunks.Dequeue(),
                blockOffset,
                eventIndex);

            return new Shard(
                _containerClient,
                _chunkFactory,
                chunks,
                currentChunk,
                chunkIndex);
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System;
using System.Collections.Generic;
using System.Text;
using System.Threading.Tasks;
using Azure.Core.TestFramework;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.ChangeFeed.Models;
using NUnit.Framework;

namespace Azure.Storage.Blobs.ChangeFeed.Tests
{
    public class BlobChangeFeedAsyncPagableTests : ChangeFeedTestBase
    {
        public BlobChangeFeedAsyncPagableTests(bool async)
            : base(async, null /* RecordedTestMode.Record /* to re-record */)
        {
        }

        [Test]
        [Ignore("")]
        public async Task Test()
        {
            // Enumerate every event in the change feed and dump it to the console.
            BlobServiceClient service = GetServiceClient_SharedKey();
            BlobChangeFeedClient changeFeedClient = service.GetChangeFeedClient();
            BlobChangeFeedAsyncPagable changes = changeFeedClient.GetChangesAsync();
            IList<BlobChangeFeedEvent> events = await changes.ToListAsync();
            foreach (BlobChangeFeedEvent changeFeedEvent in events)
            {
                Console.WriteLine(changeFeedEvent);
            }
        }

        [Test]
        [Ignore("")]
        public async Task PageSizeTest()
        {
            int pageSize = 100;
            BlobServiceClient service = GetServiceClient_SharedKey();
            BlobChangeFeedClient changeFeedClient = service.GetChangeFeedClient();

            // Record the size of every page the service hands back.
            List<int> pageSizes = new List<int>();
            await foreach (Page<BlobChangeFeedEvent> page in
                changeFeedClient.GetChangesAsync().AsPages(pageSizeHint: pageSize))
            {
                pageSizes.Add(page.Values.Count);
            }

            // All pages except the last should have a count == pageSize.
            for (int i = 0; i < pageSizes.Count - 1; i++)
            {
                Assert.AreEqual(pageSize, pageSizes[i]);
            }
        }

        [Test]
        [Ignore("")]
        public async Task CursorTest()
        {
            BlobServiceClient service = GetServiceClient_SharedKey();
            BlobChangeFeedClient changeFeedClient = service.GetChangeFeedClient();

            // Read the first page and remember its continuation token.
            BlobChangeFeedAsyncPagable changes = changeFeedClient.GetChangesAsync();
            IAsyncEnumerable<Page<BlobChangeFeedEvent>> pages = changes.AsPages(pageSizeHint: 500);
            Page<BlobChangeFeedEvent> firstPage = await pages.FirstAsync();
            foreach (BlobChangeFeedEvent changeFeedEvent in firstPage.Values)
            {
                Console.WriteLine(changeFeedEvent);
            }

            Console.WriteLine("break");

            // Resume iterating from the continuation token and drain the rest.
            string continuation = firstPage.ContinuationToken;
            BlobChangeFeedAsyncPagable resumedChanges =
                changeFeedClient.GetChangesAsync(continuation);
            IList<BlobChangeFeedEvent> remainingEvents = await resumedChanges.ToListAsync();
            foreach (BlobChangeFeedEvent changeFeedEvent in remainingEvents)
            {
                Console.WriteLine(changeFeedEvent);
            }
        }
    }
}

using System;
using System.Collections.Generic;
using System.Text;
using System.Threading.Tasks;
using Azure.Core;
using Azure.Storage.Blobs.Models;
using Moq;
using NUnit.Framework;

namespace Azure.Storage.Blobs.ChangeFeed.Tests
{
    public class BlobChangeFeedExtensionsTests : ChangeFeedTestBase
    {
        public BlobChangeFeedExtensionsTests(bool async)
            : base(async, null /* RecordedTestMode.Record /* to re-record */)
        {
        }

        [Test]
        public void ToDateTimeOffsetTests()
        {
            // Each segment path prefix should round-trip to the corresponding
            // timestamp, with missing components defaulting to their minimum.
            (string Path, DateTimeOffset Expected)[] cases =
            {
                ("idx/segments/2019/11/02/1700/meta.json", new DateTimeOffset(2019, 11, 2, 17, 0, 0, TimeSpan.Zero)),
                ("idx/segments/2019/11/02/1700/", new DateTimeOffset(2019, 11, 2, 17, 0, 0, TimeSpan.Zero)),
                ("idx/segments/2019/11/02/1700", new DateTimeOffset(2019, 11, 2, 17, 0, 0, TimeSpan.Zero)),
                ("idx/segments/2019/11/02/", new DateTimeOffset(2019, 11, 2, 0, 0, 0, TimeSpan.Zero)),
                ("idx/segments/2019/11/02", new DateTimeOffset(2019, 11, 2, 0, 0, 0, TimeSpan.Zero)),
                ("idx/segments/2019/11/", new DateTimeOffset(2019, 11, 1, 0, 0, 0, TimeSpan.Zero)),
                ("idx/segments/2019/11", new DateTimeOffset(2019, 11, 1, 0, 0, 0, TimeSpan.Zero)),
                ("idx/segments/2019/", new DateTimeOffset(2019, 1, 1, 0, 0, 0, TimeSpan.Zero)),
                ("idx/segments/2019", new DateTimeOffset(2019, 1, 1, 0, 0, 0, TimeSpan.Zero)),
            };

            foreach ((string path, DateTimeOffset expected) in cases)
            {
                Assert.AreEqual(expected, path.ToDateTimeOffset());
            }

            // A null path yields a null timestamp.
            Assert.AreEqual(
                null,
                ((string)null).ToDateTimeOffset());
        }

        [Test]
        public void RoundDownToNearestHourTests()
        {
            DateTimeOffset? time = new DateTimeOffset(2020, 03, 17, 20, 25, 30, TimeSpan.Zero);

            Assert.AreEqual(
                new DateTimeOffset?(new DateTimeOffset(2020, 03, 17, 20, 0, 0, TimeSpan.Zero)),
                time.RoundDownToNearestHour());

            Assert.AreEqual(
                null,
                ((DateTimeOffset?)null).RoundDownToNearestHour());
        }

        [Test]
        public void RoundUpToNearestHourTests()
        {
            DateTimeOffset? time = new DateTimeOffset(2020, 03, 17, 20, 25, 30, TimeSpan.Zero);

            Assert.AreEqual(
                new DateTimeOffset?(new DateTimeOffset(2020, 03, 17, 21, 0, 0, TimeSpan.Zero)),
                time.RoundUpToNearestHour());

            Assert.AreEqual(
                null,
                ((DateTimeOffset?)null).RoundUpToNearestHour());
        }

        [Test]
        public void RoundDownToNearestYearTests()
        {
            DateTimeOffset? time = new DateTimeOffset(2020, 03, 17, 20, 25, 30, TimeSpan.Zero);

            Assert.AreEqual(
                new DateTimeOffset?(new DateTimeOffset(2020, 1, 1, 0, 0, 0, TimeSpan.Zero)),
                time.RoundDownToNearestYear());

            Assert.AreEqual(
                null,
                ((DateTimeOffset?)null).RoundDownToNearestYear());
        }

        [Test]
        public async Task GetSegmentsInYearTest()
        {
            // Arrange
            Mock<BlobContainerClient> containerClientMock = new Mock<BlobContainerClient>();

            if (IsAsync)
            {
                AsyncPageable<BlobHierarchyItem> asyncPageable =
                    PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYearFuncAsync);

                containerClientMock.Setup(r => r.GetBlobsByHierarchyAsync(
                    default,
                    default,
                    default,
                    "idx/segments/2020/",
                    default)).Returns(asyncPageable);
            }
            else
            {
                Pageable<BlobHierarchyItem> pageable =
                    PageResponseEnumerator.CreateEnumerable(GetSegmentsInYearFunc);

                containerClientMock.Setup(r => r.GetBlobsByHierarchy(
                    default,
                    default,
                    default,
                    "idx/segments/2020/",
                    default)).Returns(pageable);
            }

            // Act
            Queue<string> segmentPaths = await BlobChangeFeedExtensions.GetSegmentsInYear(
                IsAsync,
                containerClientMock.Object,
                "idx/segments/2020/",
                startTime: new DateTimeOffset(2020, 3, 3, 0, 0, 0, TimeSpan.Zero),
                endTime: new DateTimeOffset(2020, 3, 3, 22, 0, 0, TimeSpan.Zero));

            // Assert - only segments inside [startTime, endTime] survive.
            Queue<string> expectedSegmentPaths = new Queue<string>();
            expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/0000/meta.json");
            expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/1800/meta.json");
            expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/2000/meta.json");
            expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/2200/meta.json");

            Assert.AreEqual(expectedSegmentPaths, segmentPaths);
        }
    }
}

// BlobChangeFeedPagableTests.cs
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Azure.Storage.Blobs.ChangeFeed.Models;
using NUnit.Framework;

namespace Azure.Storage.Blobs.ChangeFeed.Tests
{
    public class BlobChangeFeedPagableTests : ChangeFeedTestBase
    {
        public BlobChangeFeedPagableTests(bool async)
            : base(async, null /* RecordedTestMode.Record /* to re-record */)
        {
        }

        [Test]
        [Ignore("")]
        public void Test()
        {
            // Synchronously enumerate the change feed and dump each event.
            BlobServiceClient service = GetServiceClient_SharedKey();
            BlobChangeFeedClient changeFeedClient = service.GetChangeFeedClient();
            BlobChangeFeedPagable changes = changeFeedClient.GetChanges();
            IList<BlobChangeFeedEvent> events = changes.ToList();
            foreach (BlobChangeFeedEvent changeFeedEvent in events)
            {
                Console.WriteLine(changeFeedEvent);
            }
        }
    }
}
+ +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Storage.Blobs.Models; +using Moq; +using NUnit.Framework; + +namespace Azure.Storage.Blobs.ChangeFeed.Tests +{ + public class ChangeFeedFactoryTests : ChangeFeedTestBase + { + public ChangeFeedFactoryTests(bool async) + : base(async, null /* RecordedTestMode.Record /* to re-record */) + { + } + + [Test] + public async Task GetYearPathsTest() + { + // Arrange + Mock containerClient = new Mock(MockBehavior.Strict); + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = + PageResponseEnumerator.CreateEnumerable(GetYearPathFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(pageable); + } + + Mock segmentFactory = new Mock(); + ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory( + containerClient.Object, segmentFactory.Object); + + // Act + Queue years = await changeFeedFactory.GetYearPaths(IsAsync).ConfigureAwait(false); + + // Assert + Queue expectedYears = new Queue(); + expectedYears.Enqueue("idx/segments/2019/"); + expectedYears.Enqueue("idx/segments/2020/"); + expectedYears.Enqueue("idx/segments/2022/"); + expectedYears.Enqueue("idx/segments/2023/"); + Assert.AreEqual(expectedYears, years); + } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs new file mode 100644 index 0000000000000..aefdc00ff9894 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs @@ -0,0 +1,173 @@ +// Copyright (c) Microsoft Corporation. 
// All rights reserved.
// Licensed under the MIT License.

using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Azure.Core;
using Azure.Core.TestFramework;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.Models;
using Azure.Storage.Test.Shared;

namespace Azure.Storage.Blobs.ChangeFeed.Tests
{
    /// <summary>
    /// Shared plumbing for the change feed test suites: client construction,
    /// disposable test containers, and canned hierarchy-listing pages.
    /// </summary>
    public class ChangeFeedTestBase : StorageTestBase
    {

        public ChangeFeedTestBase(bool async) : this(async, null) { }

        public ChangeFeedTestBase(bool async, RecordedTestMode? mode = null)
            : base(async, mode)
        {
        }

        public string GetNewContainerName() => $"test-container-{Recording.Random.NewGuid()}";
        public string GetNewBlobName() => $"test-blob-{Recording.Random.NewGuid()}";

        public BlobServiceClient GetServiceClient_SharedKey()
            => InstrumentClient(
                new BlobServiceClient(
                    new Uri(TestConfigDefault.BlobServiceEndpoint),
                    new StorageSharedKeyCredential(
                        TestConfigDefault.AccountName,
                        TestConfigDefault.AccountKey),
                    GetOptions()));

        public BlobClientOptions GetOptions()
        {
            // Playback runs get near-zero retry delays so recorded tests stay fast.
            var options = new BlobClientOptions
            {
                Diagnostics = { IsLoggingEnabled = true },
                Retry =
                {
                    Mode = RetryMode.Exponential,
                    MaxRetries = Constants.MaxReliabilityRetries,
                    Delay = TimeSpan.FromSeconds(Mode == RecordedTestMode.Playback ? 0.01 : 0.5),
                    MaxDelay = TimeSpan.FromSeconds(Mode == RecordedTestMode.Playback ? 0.1 : 10)
                },
                Transport = GetTransport()
            };
            if (Mode != RecordedTestMode.Live)
            {
                options.AddPolicy(new RecordedClientRequestIdPolicy(Recording), HttpPipelinePosition.PerCall);
            }

            return Recording.InstrumentClientOptions(options);
        }

        public async Task<DisposingContainer> GetTestContainerAsync(
            BlobServiceClient service = default,
            string containerName = default,
            IDictionary<string, string> metadata = default,
            PublicAccessType? publicAccessType = default,
            bool premium = default)
        {

            containerName ??= GetNewContainerName();
            service ??= GetServiceClient_SharedKey();

            if (publicAccessType == default)
            {
                publicAccessType = premium ? PublicAccessType.None : PublicAccessType.BlobContainer;
            }

            BlobContainerClient container = InstrumentClient(service.GetBlobContainerClient(containerName));
            await container.CreateAsync(metadata: metadata, publicAccessType: publicAccessType.Value);
            return new DisposingContainer(container);
        }

        /// <summary>
        /// Deletes its container on disposal, ignoring failures so cleanup
        /// never masks the real test failure.
        /// </summary>
        public class DisposingContainer : IAsyncDisposable
        {
            public BlobContainerClient Container;

            public DisposingContainer(BlobContainerClient client)
            {
                Container = client;
            }

            public async ValueTask DisposeAsync()
            {
                if (Container != null)
                {
                    try
                    {
                        await Container.DeleteAsync();
                        Container = null;
                    }
                    catch
                    {
                        // swallow the exception to avoid hiding another test failure
                    }
                }
            }
        }

        public static Task<Page<BlobHierarchyItem>> GetYearsPathFuncAsync(string continuation, int? pageSizeHint)
            => Task.FromResult(GetYearPathFunc(continuation, pageSizeHint));

        // Canned year prefixes; 1601 is the sentinel year callers are expected to skip.
        public static Page<BlobHierarchyItem> GetYearPathFunc(
            string continuation,
            int? pageSizeHint)
            => new BlobHierarchyItemPage(new List<BlobHierarchyItem>
            {
                BlobsModelFactory.BlobHierarchyItem("idx/segments/1601/", null),
                BlobsModelFactory.BlobHierarchyItem("idx/segments/2019/", null),
                BlobsModelFactory.BlobHierarchyItem("idx/segments/2020/", null),
                BlobsModelFactory.BlobHierarchyItem("idx/segments/2022/", null),
                BlobsModelFactory.BlobHierarchyItem("idx/segments/2023/", null),
            });

        public static Task<Page<BlobHierarchyItem>> GetSegmentsInYearFuncAsync(
            string continuation,
            int? pageSizeHint)
            => Task.FromResult(GetSegmentsInYearFunc(continuation, pageSizeHint));

        public static Page<BlobHierarchyItem> GetSegmentsInYearFunc(
            string continuation,
            int? pageSizeHint)
            => new BlobHierarchyItemPage(new List<BlobHierarchyItem>
            {
                BlobsModelFactory.BlobHierarchyItem(
                    null,
                    BlobsModelFactory.BlobItem("idx/segments/2020/01/16/2300/meta.json", false, null)),
                BlobsModelFactory.BlobHierarchyItem(
                    null,
                    BlobsModelFactory.BlobItem("idx/segments/2020/03/02/2300/meta.json", false, null)),
                BlobsModelFactory.BlobHierarchyItem(
                    null,
                    BlobsModelFactory.BlobItem("idx/segments/2020/03/03/0000/meta.json", false, null)),
                BlobsModelFactory.BlobHierarchyItem(
                    null,
                    BlobsModelFactory.BlobItem("idx/segments/2020/03/03/1800/meta.json", false, null)),
                BlobsModelFactory.BlobHierarchyItem(
                    null,
                    BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2000/meta.json", false, null)),
                BlobsModelFactory.BlobHierarchyItem(
                    null,
                    BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2200/meta.json", false, null)),
                BlobsModelFactory.BlobHierarchyItem(
                    null,
                    BlobsModelFactory.BlobItem("idx/segments/2020/03/05/1700/meta.json", false, null)),
            });

        /// <summary>
        /// Single-page, no-continuation Page implementation backing the canned
        /// listing functions above.
        /// </summary>
        public class BlobHierarchyItemPage : Page<BlobHierarchyItem>
        {
            private List<BlobHierarchyItem> _items;

            public BlobHierarchyItemPage(List<BlobHierarchyItem> items)
            {
                _items = items;
            }

            public override IReadOnlyList<BlobHierarchyItem> Values => _items;

            public override string ContinuationToken => null;

            public override Response GetRawResponse()
            {
                throw new NotImplementedException();
            }
        }
    }
}

// ChangeFeedTests.cs
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Text;
using System.Text.Json;
using System.Threading.Tasks;
using Azure.Core;
using Azure.Core.TestFramework;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.ChangeFeed.Models;
using Azure.Storage.Blobs.Models;
using Moq;
using NUnit.Framework;

namespace Azure.Storage.Blobs.ChangeFeed.Tests
{
    public class ChangeFeedTests : ChangeFeedTestBase
    {
        public ChangeFeedTests(bool async)
            : base(async, null /* RecordedTestMode.Record /* to re-record */)
        {
        }

        /// <summary>
        /// Tests building a ChangeFeed with a ChangeFeedCursor, and then calling ChangeFeed.GetCursor()
        /// and making sure the cursors match.
        /// </summary>
        [Test]
        public async Task GetCursor()
        {
            // Arrange
            Mock<BlobServiceClient> serviceClient = new Mock<BlobServiceClient>(MockBehavior.Strict);
            Mock<BlobContainerClient> containerClient = new Mock<BlobContainerClient>(MockBehavior.Strict);
            Mock<BlobClient> blobClient = new Mock<BlobClient>(MockBehavior.Strict);
            Mock<SegmentFactory> segmentFactory = new Mock<SegmentFactory>(MockBehavior.Strict);
            Mock<Segment> segment = new Mock<Segment>(MockBehavior.Strict);

            Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed");

            serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny<string>())).Returns(containerClient.Object);
            containerClient.Setup(r => r.Uri).Returns(containerUri);

            if (IsAsync)
            {
                containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200)));
            }
            else
            {
                containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200)));
            }

            containerClient.Setup(r => r.GetBlobClient(It.IsAny<string>())).Returns(blobClient.Object);

            // Serve the canned change feed manifest from test resources.
            using FileStream stream = File.OpenRead(
                $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}");
            BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream);
            Response<BlobDownloadInfo> downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200));

            if (IsAsync)
            {
                blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse);
            }
            else
            {
                blobClient.Setup(r => r.Download()).Returns(downloadResponse);
            }

            if (IsAsync)
            {
                AsyncPageable<BlobHierarchyItem> asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathFuncAsync);

                containerClient.Setup(r => r.GetBlobsByHierarchyAsync(
                    default,
                    default,
                    "/",
                    Constants.ChangeFeed.SegmentPrefix,
                    default)).Returns(asyncPageable);
            }
            else
            {
                Pageable<BlobHierarchyItem> pageable =
                    PageResponseEnumerator.CreateEnumerable(GetYearPathFunc);

                containerClient.Setup(r => r.GetBlobsByHierarchy(
                    default,
                    default,
                    "/",
                    Constants.ChangeFeed.SegmentPrefix,
                    default)).Returns(pageable);
            }

            if (IsAsync)
            {
                AsyncPageable<BlobHierarchyItem> asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYearFuncAsync);

                containerClient.Setup(r => r.GetBlobsByHierarchyAsync(
                    default,
                    default,
                    default,
                    It.IsAny<string>(),
                    default)).Returns(asyncPageable);
            }
            else
            {
                Pageable<BlobHierarchyItem> pageable =
                    PageResponseEnumerator.CreateEnumerable(GetSegmentsInYearFunc);

                containerClient.Setup(r => r.GetBlobsByHierarchy(
                    default,
                    default,
                    default,
                    It.IsAny<string>(),
                    default)).Returns(pageable);
            }

            segmentFactory.Setup(r => r.BuildSegment(
                It.IsAny<bool>(),
                It.IsAny<string>(),
                It.IsAny<SegmentCursor>()))
                .ReturnsAsync(segment.Object);

            long chunkIndex = 1;
            long blockOffset = 2;
            long eventIndex = 3;
            ShardCursor shardCursor = new ShardCursor(
                chunkIndex,
                blockOffset,
                eventIndex);

            DateTimeOffset segmentTime = new DateTimeOffset(2020, 1, 4, 17, 0, 0, TimeSpan.Zero);
            int shardIndex = 0;
            SegmentCursor segmentCursor = new SegmentCursor(
                segmentTime,
                new List<ShardCursor>
                {
                    shardCursor
                },
                shardIndex);

            segment.Setup(r => r.GetCursor()).Returns(segmentCursor);

            DateTimeOffset endDateTime = new DateTimeOffset(2020, 5, 6, 18, 0, 0, TimeSpan.Zero);
            ChangeFeedCursor expectedCursor = new ChangeFeedCursor(
                urlHash: containerUri.ToString().GetHashCode(),
                endDateTime: endDateTime,
                currentSegmentCursor: segmentCursor);

            ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(
                containerClient.Object,
                segmentFactory.Object);

            // Act - build from a serialized cursor, then read the cursor back.
            ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed(
                IsAsync,
                continuation: JsonSerializer.Serialize<ChangeFeedCursor>(expectedCursor));

            ChangeFeedCursor actualCursor = changeFeed.GetCursor();

            // Assert
            Assert.AreEqual(expectedCursor.CursorVersion, actualCursor.CursorVersion);
            Assert.AreEqual(expectedCursor.EndTime, actualCursor.EndTime);
            Assert.AreEqual(expectedCursor.UrlHash, actualCursor.UrlHash);

            Assert.AreEqual(expectedCursor.CurrentSegmentCursor.SegmentTime, actualCursor.CurrentSegmentCursor.SegmentTime);
            Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardIndex, actualCursor.CurrentSegmentCursor.ShardIndex);
            Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors.Count, actualCursor.CurrentSegmentCursor.ShardCursors.Count);

            Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors[0].BlockOffset, actualCursor.CurrentSegmentCursor.ShardCursors[0].BlockOffset);
            Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors[0].ChunkIndex, actualCursor.CurrentSegmentCursor.ShardCursors[0].ChunkIndex);
            Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors[0].EventIndex, actualCursor.CurrentSegmentCursor.ShardCursors[0].EventIndex);

            containerClient.Verify(r => r.Uri);

            if (IsAsync)
            {
                containerClient.Verify(r => r.ExistsAsync(default));
            }
            else
            {
                containerClient.Verify(r => r.Exists(default));
            }

            containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath));

            if (IsAsync)
            {
                blobClient.Verify(r => r.DownloadAsync());
            }
            else
            {
                blobClient.Verify(r => r.Download());
            }

            if (IsAsync)
            {
                containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
                    default,
                    default,
                    "/",
                    Constants.ChangeFeed.SegmentPrefix,
                    default));
            }
            else
            {
                containerClient.Verify(r => r.GetBlobsByHierarchy(
                    default,
                    default,
                    "/",
                    Constants.ChangeFeed.SegmentPrefix,
                    default));
            }

            if (IsAsync)
            {
                containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
                    default,
                    default,
                    default,
                    "idx/segments/2020/",
                    default));
            }
            else
            {
                containerClient.Verify(r => r.GetBlobsByHierarchy(
                    default,
                    default,
                    default,
                    "idx/segments/2020/",
                    default));
            }

            segmentFactory.Verify(r => r.BuildSegment(
                IsAsync,
                "idx/segments/2020/01/16/2300/meta.json",
                It.Is<SegmentCursor>(
                    r => r.SegmentTime == segmentTime
                    && r.ShardIndex == shardIndex
                    && r.ShardCursors.Count == 1
                    && r.ShardCursors[0].BlockOffset == blockOffset
                    && r.ShardCursors[0].ChunkIndex == chunkIndex
                    && r.ShardCursors[0].EventIndex == eventIndex
                )));

            segment.Verify(r => r.GetCursor());
        }

        /// <summary>
        /// This test has 8 total events, 4 segments, and 2 years.
        /// We call ChangeFeed.GetPage() with a page size of 3, and then again with no page size,
        /// resulting in two pages with 3 and 5 Events.
        /// </summary>
        [Test]
        public async Task GetPage()
        {
            // Arrange
            int eventCount = 8;
            int segmentCount = 4;
            Mock<BlobServiceClient> serviceClient = new Mock<BlobServiceClient>(MockBehavior.Strict);
            Mock<BlobContainerClient> containerClient = new Mock<BlobContainerClient>(MockBehavior.Strict);
            Mock<BlobClient> blobClient = new Mock<BlobClient>(MockBehavior.Strict);
            Mock<SegmentFactory> segmentFactory = new Mock<SegmentFactory>(MockBehavior.Strict);
            Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed");

            List<Mock<Segment>> segments = new List<Mock<Segment>>();
            for (int i = 0; i < segmentCount; i++)
            {
                segments.Add(new Mock<Segment>(MockBehavior.Strict));
            }

            // ChangeFeedFactory.BuildChangeFeed() setups.
            serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny<string>())).Returns(containerClient.Object);
            containerClient.SetupSequence(r => r.Uri)
                .Returns(containerUri)
                .Returns(containerUri);

            if (IsAsync)
            {
                containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200)));
            }
            else
            {
                containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200)));
            }

            containerClient.Setup(r => r.GetBlobClient(It.IsAny<string>())).Returns(blobClient.Object);

            using FileStream stream = File.OpenRead(
                $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}");
            BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream);
            Response<BlobDownloadInfo> downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200));

            if (IsAsync)
            {
                blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse);
            }
            else
            {
                blobClient.Setup(r => r.Download()).Returns(downloadResponse);
            }

            if (IsAsync)
            {
                AsyncPageable<BlobHierarchyItem> asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathShortFuncAsync);

                containerClient.Setup(r => r.GetBlobsByHierarchyAsync(
                    default,
                    default,
                    "/",
                    Constants.ChangeFeed.SegmentPrefix,
                    default)).Returns(asyncPageable);
            }
            else
            {
                Pageable<BlobHierarchyItem> pageable =
                    PageResponseEnumerator.CreateEnumerable(GetYearsPathShortFunc);

                containerClient.Setup(r => r.GetBlobsByHierarchy(
                    default,
                    default,
                    "/",
                    Constants.ChangeFeed.SegmentPrefix,
                    default)).Returns(pageable);
            }

            // First listing returns the 2019 segments, second the 2020 segments.
            if (IsAsync)
            {
                AsyncPageable<BlobHierarchyItem> asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2019FuncAsync);
                AsyncPageable<BlobHierarchyItem> asyncPageable2 = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2020FuncAsync);

                containerClient.SetupSequence(r => r.GetBlobsByHierarchyAsync(
                    default,
                    default,
                    default,
                    It.IsAny<string>(),
                    default))
                    .Returns(asyncPageable)
                    .Returns(asyncPageable2);
            }
            else
            {
                Pageable<BlobHierarchyItem> pageable =
                    PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2019Func);

                Pageable<BlobHierarchyItem> pageable2 =
                    PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2020Func);

                containerClient.SetupSequence(r => r.GetBlobsByHierarchy(
                    default,
                    default,
                    default,
                    It.IsAny<string>(),
                    default))
                    .Returns(pageable)
                    .Returns(pageable2);
            }

            segmentFactory.SetupSequence(r => r.BuildSegment(
                It.IsAny<bool>(),
                It.IsAny<string>(),
                default))
                .Returns(Task.FromResult(segments[0].Object))
                .Returns(Task.FromResult(segments[1].Object))
                .Returns(Task.FromResult(segments[2].Object))
                .Returns(Task.FromResult(segments[3].Object));

            List<BlobChangeFeedEvent> events = new List<BlobChangeFeedEvent>();
            for (int i = 0; i < eventCount; i++)
            {
                events.Add(new BlobChangeFeedEvent
                {
                    Id = Guid.NewGuid()
                });
            }

            // Segments 1 and 3 are drained over two GetPage calls; 0 and 2 in one.
            segments[0].SetupSequence(r => r.HasNext())
                .Returns(false);
            segments[1].SetupSequence(r => r.HasNext())
                .Returns(true)
                .Returns(false);
            segments[2].SetupSequence(r => r.HasNext())
                .Returns(false);
            segments[3].SetupSequence(r => r.HasNext())
                .Returns(true)
                .Returns(false);

            segments[0].SetupSequence(r => r.GetPage(
                It.IsAny<bool>(),
                It.IsAny<int>(),
                default))
                .Returns(Task.FromResult(new List<BlobChangeFeedEvent>
                {
                    events[0],
                    events[1]
                }));

            segments[1].SetupSequence(r => r.GetPage(
                It.IsAny<bool>(),
                It.IsAny<int>(),
                default))
                .Returns(Task.FromResult(new List<BlobChangeFeedEvent>
                {
                    events[2]
                }))
                .Returns(Task.FromResult(new List<BlobChangeFeedEvent>
                {
                    events[3]
                }));

            segments[2].SetupSequence(r => r.GetPage(
                It.IsAny<bool>(),
                It.IsAny<int>(),
                default))
                .Returns(Task.FromResult(new List<BlobChangeFeedEvent>
                {
                    events[4],
                    events[5]
                }));

            segments[3].SetupSequence(r => r.GetPage(
                It.IsAny<bool>(),
                It.IsAny<int>(),
                default))
                .Returns(Task.FromResult(new List<BlobChangeFeedEvent>
                {
                    events[6],
                    events[7]
                }));

            for (int i = 0; i < segments.Count; i++)
            {
                segments[i].Setup(r => r.Finalized)
                    .Returns(true);
            }

            long chunkIndex = 1;
            long blockOffset = 2;
            long eventIndex = 3;
            ShardCursor shardCursor = new ShardCursor(
                chunkIndex,
                blockOffset,
                eventIndex);

            DateTimeOffset segmentTime = new DateTimeOffset(2020, 1, 4, 17, 0, 0, TimeSpan.Zero);
            int shardIndex = 0;
            SegmentCursor segmentCursor = new SegmentCursor(
                segmentTime,
                new List<ShardCursor>
                {
                    shardCursor
                },
                shardIndex);
            ChangeFeedCursor changeFeedCursor = new ChangeFeedCursor(
                containerUri.ToString().GetHashCode(),
                null,
                segmentCursor);

            containerClient.SetupSequence(r => r.Uri)
                .Returns(containerUri)
                .Returns(containerUri);

            segments[1].Setup(r => r.GetCursor()).Returns(segmentCursor);
            segments[3].Setup(r => r.GetCursor()).Returns(segmentCursor);

            ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(
                containerClient.Object,
                segmentFactory.Object);
            ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed(
                IsAsync);

            // Act
            Page<BlobChangeFeedEvent> page0 = await changeFeed.GetPage(IsAsync, 3);
            Page<BlobChangeFeedEvent> page1 = await changeFeed.GetPage(IsAsync);

            // Assert
            Assert.AreEqual(JsonSerializer.Serialize<ChangeFeedCursor>(changeFeedCursor), page0.ContinuationToken);

            for (int i = 0; i < 3; i++)
            {
                Assert.AreEqual(events[i].Id, page0.Values[i].Id);
            }

            Assert.AreEqual(JsonSerializer.Serialize<ChangeFeedCursor>(changeFeedCursor), page1.ContinuationToken);

            for (int i = 3; i < events.Count; i++)
            {
                Assert.AreEqual(events[i].Id, page1.Values[i - 3].Id);
            }

            // ChangeFeedFactory.BuildChangeFeed() verifies
            containerClient.Verify(r => r.Uri);

            if (IsAsync)
            {
                containerClient.Verify(r => r.ExistsAsync(default));
            }
            else
            {
                containerClient.Verify(r => r.Exists(default));
            }

            containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath));

            if (IsAsync)
            {
                blobClient.Verify(r => r.DownloadAsync());
            }
            else
            {
                blobClient.Verify(r => r.Download());
            }

            if (IsAsync)
            {
                containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
                    default,
                    default,
                    "/",
                    Constants.ChangeFeed.SegmentPrefix,
                    default));
            }
            else
            {
                containerClient.Verify(r => r.GetBlobsByHierarchy(
                    default,
                    default,
                    "/",
                    Constants.ChangeFeed.SegmentPrefix,
                    default));
            }

            if (IsAsync)
            {
                containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
                    default,
                    default,
                    default,
                    "idx/segments/2019/",
                    default));

                containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
                    default,
                    default,
                    default,
                    "idx/segments/2020/",
                    default));
            }
            else
            {
                containerClient.Verify(r => r.GetBlobsByHierarchy(
                    default,
                    default,
                    default,
                    "idx/segments/2019/",
                    default));

                containerClient.Verify(r => r.GetBlobsByHierarchy(
                    default,
                    default,
                    default,
                    "idx/segments/2020/",
                    default));
            }

            // ChangeFeed.Next() verifies.
            segments[0].Verify(r => r.HasNext());
            segments[1].Verify(r => r.HasNext(), Times.Exactly(2));
            segments[2].Verify(r => r.HasNext());
            segments[3].Verify(r => r.HasNext(), Times.Exactly(3));

            segments[0].Verify(r => r.GetPage(
                IsAsync,
                3,
                default));

            segments[1].Verify(r => r.GetPage(
                IsAsync,
                1,
                default));

            segments[1].Verify(r => r.GetPage(
                IsAsync,
                Constants.ChangeFeed.DefaultPageSize,
                default));

            segments[2].Verify(r => r.GetPage(
                IsAsync,
                Constants.ChangeFeed.DefaultPageSize - 1,
                default));

            segments[3].Verify(r => r.GetPage(
                IsAsync,
                Constants.ChangeFeed.DefaultPageSize - 3,
                default));

            segments[1].Verify(r => r.GetCursor());
            segments[3].Verify(r => r.GetCursor());

            segments[0].Verify(r => r.Finalized, Times.Exactly(3));
            segments[1].Verify(r => r.Finalized, Times.Exactly(4));
            segments[2].Verify(r => r.Finalized, Times.Exactly(1));
            segments[3].Verify(r => r.Finalized, Times.Exactly(2));

            containerClient.Verify(r => r.Uri, Times.Exactly(2));
        }

        [Test]
        public async Task NoYearsAfterStartTime()
        {
            // Arrange
            Mock<BlobServiceClient> serviceClient = new Mock<BlobServiceClient>(MockBehavior.Strict);
            Mock<BlobContainerClient> containerClient = new Mock<BlobContainerClient>(MockBehavior.Strict);
Mock blobClient = new Mock(MockBehavior.Strict); + Mock segmentFactory = new Mock(MockBehavior.Strict); + Mock segment = new Mock(MockBehavior.Strict); + + Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed"); + + serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny())).Returns(containerClient.Object); + + if (IsAsync) + { + containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200))); + } + else + { + containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200))); + } + + containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); + + using FileStream stream = File.OpenRead( + $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}"); + BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); + Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200)); + + if (IsAsync) + { + blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse); + } + else + { + blobClient.Setup(r => r.Download()).Returns(downloadResponse); + } + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = + PageResponseEnumerator.CreateEnumerable(GetYearPathFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(pageable); + } + + ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory( + containerClient.Object, + segmentFactory.Object); + ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed( + IsAsync, + 
startTime: new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero)); + + // Act + bool hasNext = changeFeed.HasNext(); + + // Assert + Assert.IsFalse(hasNext); + + if (IsAsync) + { + containerClient.Verify(r => r.ExistsAsync(default)); + } + else + { + containerClient.Verify(r => r.Exists(default)); + } + + containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath)); + + if (IsAsync) + { + blobClient.Verify(r => r.DownloadAsync()); + } + else + { + blobClient.Verify(r => r.Download()); + } + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)); + } + } + + [Test] + public async Task NoSegmentsRemainingInStartYear() + { + // Arrange + int eventCount = 2; + int segmentCount = 2; + Mock serviceClient = new Mock(MockBehavior.Strict); + Mock containerClient = new Mock(MockBehavior.Strict); + Mock blobClient = new Mock(MockBehavior.Strict); + Mock segmentFactory = new Mock(MockBehavior.Strict); + Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed"); + + List> segments = new List>(); + for (int i = 0; i < segmentCount; i++) + { + segments.Add(new Mock(MockBehavior.Strict)); + } + + // ChangeFeedFactory.BuildChangeFeed() setups. 
+ serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny())).Returns(containerClient.Object); + containerClient.SetupSequence(r => r.Uri) + .Returns(containerUri) + .Returns(containerUri); + + if (IsAsync) + { + containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200))); + } + else + { + containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200))); + } + + containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); + + using FileStream stream = File.OpenRead( + $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}"); + BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); + Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200)); + + if (IsAsync) + { + blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse); + } + else + { + blobClient.Setup(r => r.Download()).Returns(downloadResponse); + } + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathShortFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = + PageResponseEnumerator.CreateEnumerable(GetYearsPathShortFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(pageable); + } + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2019FuncAsync); + AsyncPageable asyncPageable2 = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2020FuncAsync); + + containerClient.SetupSequence(r => r.GetBlobsByHierarchyAsync( + default, + default, + 
default, + It.IsAny(), + default)) + .Returns(asyncPageable) + .Returns(asyncPageable2); + } + else + { + Pageable pageable = + PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2019Func); + + Pageable pageable2 = + PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2020Func); + + containerClient.SetupSequence(r => r.GetBlobsByHierarchy( + default, + default, + default, + It.IsAny(), + default)) + .Returns(pageable) + .Returns(pageable2); + } + + segmentFactory.SetupSequence(r => r.BuildSegment( + It.IsAny(), + It.IsAny(), + default)) + .Returns(Task.FromResult(segments[0].Object)) + .Returns(Task.FromResult(segments[1].Object)); + + List events = new List(); + for (int i = 0; i < eventCount; i++) + { + events.Add(new BlobChangeFeedEvent + { + Id = Guid.NewGuid() + }); + } + + segments[0].SetupSequence(r => r.GetPage( + It.IsAny(), + It.IsAny(), + default)) + .Returns(Task.FromResult(new List + { + events[0] + })); + + segments[1].SetupSequence(r => r.GetPage( + It.IsAny(), + It.IsAny(), + default)) + .Returns(Task.FromResult(new List + { + events[1] + })); + + segments[0].SetupSequence(r => r.HasNext()) + .Returns(false); + segments[1].SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(false); + + segments[1].Setup(r => r.GetCursor()) + .Returns(new SegmentCursor()); + + for (int i = 0; i < segments.Count; i++) + { + segments[i].Setup(r => r.Finalized) + .Returns(true); + } + + ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory( + containerClient.Object, + segmentFactory.Object); + ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed( + IsAsync, + startTime: new DateTimeOffset(2019, 6, 1, 0, 0, 0, TimeSpan.Zero)); + + // Act + Page page = await changeFeed.GetPage(IsAsync); + + // Assert + Assert.AreEqual(2, page.Values.Count); + Assert.AreEqual(events[0].Id, page.Values[0].Id); + Assert.AreEqual(events[1].Id, page.Values[1].Id); + + containerClient.Verify(r => r.Uri); + + if (IsAsync) + { + containerClient.Verify(r => 
r.ExistsAsync(default)); + } + else + { + containerClient.Verify(r => r.Exists(default)); + } + + containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath)); + + if (IsAsync) + { + blobClient.Verify(r => r.DownloadAsync()); + } + else + { + blobClient.Verify(r => r.Download()); + } + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)); + } + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + "idx/segments/2019/", + default)); + + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + "idx/segments/2020/", + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + "idx/segments/2019/", + default)); + + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + "idx/segments/2020/", + default)); + } + + // ChangeFeeed.Next() verifies. + segments[0].Verify(r => r.HasNext(), Times.Exactly(1)); + + segments[0].Verify(r => r.GetPage( + IsAsync, + Constants.ChangeFeed.DefaultPageSize, + default)); + + segments[1].Verify(r => r.HasNext(), Times.Exactly(3)); + + segments[1].Verify(r => r.GetPage( + IsAsync, + Constants.ChangeFeed.DefaultPageSize - 1, + default)); + + containerClient.Verify(r => r.Uri, Times.Exactly(1)); + + } + + public static Task> GetYearsPathShortFuncAsync(string continuation, int? pageSizeHint) + => Task.FromResult(GetYearsPathShortFunc(continuation, pageSizeHint)); + + public static Page GetYearsPathShortFunc( + string continuation, + int? 
pageSizeHint) + => new BlobHierarchyItemPage(new List + { + BlobsModelFactory.BlobHierarchyItem("idx/segments/1601/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2019/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2020/", null) + }); + + public static Task> GetSegmentsInYear2019FuncAsync( + string continuation, + int? pageSizeHint) + => Task.FromResult(GetSegmentsInYear2019Func(continuation, pageSizeHint)); + + public static Page GetSegmentsInYear2019Func( + string continuation, + int? pageSizeHint) + => new BlobHierarchyItemPage(new List + { + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2019/03/02/2000/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2019/04/03/2200/meta.json", false, null)) + }); + + public static Task> GetSegmentsInYear2020FuncAsync( + string continuation, + int? pageSizeHint) + => Task.FromResult(GetSegmentsInYear2020Func(continuation, pageSizeHint)); + + public static Page GetSegmentsInYear2020Func( + string continuation, + int? pageSizeHint) + => new BlobHierarchyItemPage(new List + { + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2000/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2200/meta.json", false, null)) + }); + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs new file mode 100644 index 0000000000000..71cff0933289f --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs @@ -0,0 +1,266 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Azure.Storage.Blobs.ChangeFeed.Models;
using Azure.Storage.Blobs.Models;
using Azure.Storage.Internal.Avro;
using Moq;
using NUnit.Framework;

namespace Azure.Storage.Blobs.ChangeFeed.Tests
{
    public class ChunkTests : ChangeFeedTestBase
    {
        public ChunkTests(bool async)
            : base(async, null /* RecordedTestMode.Record /* to re-record */)
        {
        }

        /// <summary>
        /// Tests Chunk.HasNext() when the underlying AvroReader.HasNext() returns true.
        /// </summary>
        [Test]
        public void HasNext_True()
        {
            // Arrange
            string chunkPath = "chunkPath";
            Mock<BlobContainerClient> containerClient = new Mock<BlobContainerClient>(MockBehavior.Strict);
            Mock<BlobClient> blobClient = new Mock<BlobClient>(MockBehavior.Strict);
            Mock<AvroReaderFactory> avroReaderFactory = new Mock<AvroReaderFactory>(MockBehavior.Strict);
            Mock<AvroReader> avroReader = new Mock<AvroReader>(MockBehavior.Strict);
            Mock<LazyLoadingBlobStreamFactory> lazyLoadingBlobStreamFactory = new Mock<LazyLoadingBlobStreamFactory>(MockBehavior.Strict);
            Mock<LazyLoadingBlobStream> lazyLoadingBlobStream = new Mock<LazyLoadingBlobStream>(MockBehavior.Strict);

            containerClient.Setup(r => r.GetBlobClient(It.IsAny<string>())).Returns(blobClient.Object);
            lazyLoadingBlobStreamFactory.Setup(r => r.BuildLazyLoadingBlobStream(
                It.IsAny<BlobClient>(),
                It.IsAny<long>(),
                It.IsAny<long>()))
                .Returns(lazyLoadingBlobStream.Object);
            avroReaderFactory.Setup(r => r.BuildAvroReader(It.IsAny<Stream>())).Returns(avroReader.Object);
            avroReader.Setup(r => r.HasNext()).Returns(true);

            ChunkFactory chunkFactory = new ChunkFactory(
                containerClient.Object,
                lazyLoadingBlobStreamFactory.Object,
                avroReaderFactory.Object);
            Chunk chunk = chunkFactory.BuildChunk(
                chunkPath);

            // Act
            bool hasNext = chunk.HasNext();

            // Assert
            Assert.IsTrue(hasNext);

            containerClient.Verify(r => r.GetBlobClient(chunkPath));
            lazyLoadingBlobStreamFactory.Verify(r => r.BuildLazyLoadingBlobStream(
                blobClient.Object,
                0,
                Constants.ChangeFeed.ChunkBlockDownloadSize));
            avroReaderFactory.Verify(r => r.BuildAvroReader(lazyLoadingBlobStream.Object));
            avroReader.Verify(r => r.HasNext());
        }

        /// <summary>
        /// Tests Chunk.HasNext() when the underlying AvroReader.HasNext() returns false.
        /// </summary>
        [Test]
        public void HasNext_False()
        {
            // Arrange
            string chunkPath = "chunkPath";
            Mock<BlobContainerClient> containerClient = new Mock<BlobContainerClient>(MockBehavior.Strict);
            Mock<BlobClient> blobClient = new Mock<BlobClient>(MockBehavior.Strict);
            Mock<AvroReaderFactory> avroReaderFactory = new Mock<AvroReaderFactory>(MockBehavior.Strict);
            Mock<AvroReader> avroReader = new Mock<AvroReader>(MockBehavior.Strict);
            Mock<LazyLoadingBlobStreamFactory> lazyLoadingBlobStreamFactory = new Mock<LazyLoadingBlobStreamFactory>(MockBehavior.Strict);
            Mock<LazyLoadingBlobStream> lazyLoadingBlobStream = new Mock<LazyLoadingBlobStream>(MockBehavior.Strict);

            containerClient.Setup(r => r.GetBlobClient(It.IsAny<string>())).Returns(blobClient.Object);
            lazyLoadingBlobStreamFactory.Setup(r => r.BuildLazyLoadingBlobStream(
                It.IsAny<BlobClient>(),
                It.IsAny<long>(),
                It.IsAny<long>()))
                .Returns(lazyLoadingBlobStream.Object);
            avroReaderFactory.Setup(r => r.BuildAvroReader(It.IsAny<Stream>())).Returns(avroReader.Object);
            avroReader.Setup(r => r.HasNext()).Returns(false);

            ChunkFactory chunkFactory = new ChunkFactory(
                containerClient.Object,
                lazyLoadingBlobStreamFactory.Object,
                avroReaderFactory.Object);
            Chunk chunk = chunkFactory.BuildChunk(
                chunkPath);

            // Act
            bool hasNext = chunk.HasNext();

            // Assert
            Assert.IsFalse(hasNext);

            containerClient.Verify(r => r.GetBlobClient(chunkPath));
            lazyLoadingBlobStreamFactory.Verify(r => r.BuildLazyLoadingBlobStream(
                blobClient.Object,
                0,
                Constants.ChangeFeed.ChunkBlockDownloadSize));
            avroReaderFactory.Verify(r => r.BuildAvroReader(lazyLoadingBlobStream.Object));
            avroReader.Verify(r => r.HasNext());
        }

        /// <summary>
        /// Tests Chunk.Next() and the BlobChangeFeedEvent and BlobChangeFeedEventData constructors.
+ /// + [Test] + public async Task Next() + { + // Arrange + string chunkPath = "chunkPath"; + long blockOffset = 5; + long eventIndex = 10; + + string topic = "topic"; + string subject = "subject"; + string eventType = "BlobCreated"; + DateTimeOffset eventTime = new DateTimeOffset(2020, 4, 30, 8, 26, 30, TimeSpan.Zero); + Guid eventId = Guid.NewGuid(); + long dataVersion = 1; + string metadataVersion = "1"; + + string api = "CreateBlob"; + Guid clientRequestId = Guid.NewGuid(); + Guid requestId = Guid.NewGuid(); + ETag etag = new ETag("0x8D75EF45A3B8617"); + string contentType = "contentType"; + long contentLength = Constants.KB; + string blobType = "BlockBlob"; + long contentOffset = 5; + Uri destinationUri = new Uri("https://www.destination.com"); + Uri sourceUri = new Uri("https://www.source.com"); + Uri uri = new Uri("https://www.uri.com"); + bool recursive = true; + string sequencer = "sequencer"; + + Dictionary record = new Dictionary + { + { Constants.ChangeFeed.Event.Topic, topic }, + { Constants.ChangeFeed.Event.Subject, subject }, + { Constants.ChangeFeed.Event.EventType, eventType }, + { Constants.ChangeFeed.Event.EventTime, eventTime.ToString() }, + { Constants.ChangeFeed.Event.EventId, eventId.ToString() }, + { Constants.ChangeFeed.Event.DataVersion, dataVersion }, + { Constants.ChangeFeed.Event.MetadataVersion, metadataVersion }, + { Constants.ChangeFeed.Event.Data, new Dictionary + { + { Constants.ChangeFeed.EventData.Api, api }, + { Constants.ChangeFeed.EventData.ClientRequestId, clientRequestId.ToString() }, + { Constants.ChangeFeed.EventData.RequestId, requestId.ToString() }, + { Constants.ChangeFeed.EventData.Etag, etag.ToString() }, + { Constants.ChangeFeed.EventData.ContentType, contentType }, + { Constants.ChangeFeed.EventData.ContentLength, contentLength }, + { Constants.ChangeFeed.EventData.BlobType, blobType }, + { Constants.ChangeFeed.EventData.ContentOffset, contentOffset }, + { Constants.ChangeFeed.EventData.DestinationUrl, 
destinationUri.ToString() }, + { Constants.ChangeFeed.EventData.SourceUrl, sourceUri.ToString() }, + { Constants.ChangeFeed.EventData.Url, uri.ToString() }, + { Constants.ChangeFeed.EventData.Recursive, recursive }, + { Constants.ChangeFeed.EventData.Sequencer, sequencer } + } + } + }; + + Mock containerClient = new Mock(MockBehavior.Strict); + Mock blobClient = new Mock(MockBehavior.Strict); + Mock avroReaderFactory = new Mock(MockBehavior.Strict); + Mock avroReader = new Mock(MockBehavior.Strict); + Mock lazyLoadingBlobStreamFactory = new Mock(MockBehavior.Strict); + Mock dataStream = new Mock(MockBehavior.Strict); + Mock headStream = new Mock(MockBehavior.Strict); + + containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); + lazyLoadingBlobStreamFactory.SetupSequence(r => r.BuildLazyLoadingBlobStream( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(dataStream.Object) + .Returns(headStream.Object); + avroReaderFactory.Setup(r => r.BuildAvroReader( + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny())).Returns(avroReader.Object); + avroReader.Setup(r => r.HasNext()).Returns(true); + avroReader.Setup(r => r.Next( + It.IsAny(), + It.IsAny())) + .ReturnsAsync(record); + + avroReader.Setup(r => r.BlockOffset).Returns(blockOffset); + avroReader.Setup(r => r.ObjectIndex).Returns(eventIndex); + + ChunkFactory chunkFactory = new ChunkFactory( + containerClient.Object, + lazyLoadingBlobStreamFactory.Object, + avroReaderFactory.Object); + Chunk chunk = chunkFactory.BuildChunk( + chunkPath, + blockOffset, + eventIndex); + + // Act + BlobChangeFeedEvent changeFeedEvent = await chunk.Next(IsAsync); + + // Assert + Assert.AreEqual(topic, changeFeedEvent.Topic); + Assert.AreEqual(subject, changeFeedEvent.Subject); + Assert.AreEqual(BlobChangeFeedEventType.BlobCreated, changeFeedEvent.EventType); + Assert.AreEqual(eventTime, changeFeedEvent.EventTime); + Assert.AreEqual(eventId, changeFeedEvent.Id); + Assert.AreEqual(dataVersion, 
changeFeedEvent.DataVersion); + Assert.AreEqual(metadataVersion, changeFeedEvent.MetadataVersion); + + Assert.AreEqual(api, changeFeedEvent.EventData.Api); + Assert.AreEqual(clientRequestId, changeFeedEvent.EventData.ClientRequestId); + Assert.AreEqual(requestId, changeFeedEvent.EventData.RequestId); + Assert.AreEqual(etag, changeFeedEvent.EventData.ETag); + Assert.AreEqual(contentType, changeFeedEvent.EventData.ContentType); + Assert.AreEqual(contentLength, changeFeedEvent.EventData.ContentLength); + Assert.AreEqual(BlobType.Block, changeFeedEvent.EventData.BlobType); + Assert.AreEqual(contentOffset, changeFeedEvent.EventData.ContentOffset); + Assert.AreEqual(destinationUri, changeFeedEvent.EventData.DestinationUri); + Assert.AreEqual(sourceUri, changeFeedEvent.EventData.SourceUri); + Assert.AreEqual(uri, changeFeedEvent.EventData.Uri); + Assert.AreEqual(recursive, changeFeedEvent.EventData.Recursive); + Assert.AreEqual(sequencer, changeFeedEvent.EventData.Sequencer); + + containerClient.Verify(r => r.GetBlobClient(chunkPath)); + lazyLoadingBlobStreamFactory.Verify(r => r.BuildLazyLoadingBlobStream( + blobClient.Object, + blockOffset, + Constants.ChangeFeed.ChunkBlockDownloadSize)); + lazyLoadingBlobStreamFactory.Verify(r => r.BuildLazyLoadingBlobStream( + blobClient.Object, + 0, + 3 * Constants.KB)); + avroReaderFactory.Verify(r => r.BuildAvroReader( + dataStream.Object, + headStream.Object, + blockOffset, + eventIndex)); + avroReader.Verify(r => r.HasNext()); + avroReader.Verify(r => r.Next( + IsAsync, + default)); + avroReader.Verify(r => r.BlockOffset); + avroReader.Verify(r => r.ObjectIndex); + } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/LazyLoadingBlobStreamTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/LazyLoadingBlobStreamTests.cs new file mode 100644 index 0000000000000..c23211c41a1cf --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/LazyLoadingBlobStreamTests.cs @@ -0,0 +1,98 @@ +// Copyright (c) 
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using Azure.Storage.Test;
using NUnit.Framework;

namespace Azure.Storage.Blobs.ChangeFeed.Tests
{
    public class LazyLoadingBlobStreamTests : ChangeFeedTestBase
    {
        public LazyLoadingBlobStreamTests(bool async)
            : base(async, null /* RecordedTestMode.Record /* to re-record */)
        {
        }

        /// <summary>
        /// Tests Read() with various sized Reads().
        /// </summary>
        [Test]
        public async Task ReadAsync()
        {
            // Arrange
            await using DisposingContainer test = await GetTestContainerAsync();

            // Arrange
            int length = Constants.KB;
            byte[] expectedData = GetRandomBuffer(length);
            BlobClient blobClient = InstrumentClient(test.Container.GetBlobClient(GetNewBlobName()));
            using (var stream = new MemoryStream(expectedData))
            {
                await blobClient.UploadAsync(stream);
            }
            // Block size deliberately not a divisor of the read sizes, so reads
            // straddle block boundaries.
            LazyLoadingBlobStream lazyStream = new LazyLoadingBlobStream(blobClient, offset: 0, blockSize: 157);
            byte[] actualData = new byte[length];
            int offset = 0;

            // Act
            int count = 0;
            while (offset + count < length)
            {
                for (count = 6; count < 37; count += 6)
                {
                    await lazyStream.ReadAsync(actualData, offset, count);
                    offset += count;
                }
            }
            // Read whatever remains in a single final call.
            await lazyStream.ReadAsync(actualData, offset, length - offset);

            // Assert
            TestHelper.AssertSequenceEqual(expectedData, actualData);
        }

        /// <summary>
        /// Tests LazyBlobStream parameter validation.
        /// </summary>
        [Test]
        public async Task ReadAsync_InvalidParameterTests()
        {
            // Arrange
            BlobClient blobClient = new BlobClient(new Uri("https://www.doesntmatter.com"));
            LazyLoadingBlobStream lazyStream = new LazyLoadingBlobStream(blobClient, offset: 0, blockSize: Constants.KB);

            // Act
            await TestHelper.AssertExpectedExceptionAsync<ArgumentNullException>(
                lazyStream.ReadAsync(buffer: null, offset: 0, count: 10),
                e => Assert.AreEqual($"buffer cannot be null.{Environment.NewLine}Parameter name: buffer", e.Message));

            await TestHelper.AssertExpectedExceptionAsync<ArgumentOutOfRangeException>(
                lazyStream.ReadAsync(buffer: new byte[10], offset: -1, count: 10),
                e => Assert.AreEqual(
                    $"Specified argument was out of the range of valid values.{Environment.NewLine}Parameter name: offset cannot be less than 0.",
                    e.Message));

            await TestHelper.AssertExpectedExceptionAsync<ArgumentOutOfRangeException>(
                lazyStream.ReadAsync(buffer: new byte[10], offset: 11, count: 10),
                e => Assert.AreEqual(
                    $"Specified argument was out of the range of valid values.{Environment.NewLine}Parameter name: offset cannot exceed buffer length.",
                    e.Message));

            await TestHelper.AssertExpectedExceptionAsync<ArgumentOutOfRangeException>(
                lazyStream.ReadAsync(buffer: new byte[10], offset: 1, count: -1),
                e => Assert.AreEqual(
                    $"Specified argument was out of the range of valid values.{Environment.NewLine}Parameter name: count cannot be less than 0.",
                    e.Message));

            await TestHelper.AssertExpectedExceptionAsync<ArgumentOutOfRangeException>(
                lazyStream.ReadAsync(buffer: new byte[10], offset: 5, count: 15),
                e => Assert.AreEqual(
                    $"Specified argument was out of the range of valid values.{Environment.NewLine}Parameter name: offset + count cannot exceed buffer length.",
                    e.Message));
        }
    }
}
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/ChangeFeedManifest.json @@ -0,0 +1,12 @@ +{ + "version": 0, + "lastConsumable": "2020-05-04T19:00:00.000Z", + "storageDiagnostics": { + "version": 0, + "lastModifiedTime": "2020-05-04T19:25:09.594Z", + "data": { + "aid": "a6b895a0-7006-0041-0049-22cadf06029a", + "lfz": "2020-04-29T06:00:00.000Z" + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/SegmentManifest.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/SegmentManifest.json new file mode 100644 index 0000000000000..21b93ea966a30 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/SegmentManifest.json @@ -0,0 +1,26 @@ +{ + "version": 0, + "begin": "2020-03-25T02:00:00.000Z", + "intervalSecs": 3600, + "status": "Finalized", + "config": { + "version": 0, + "configVersionEtag": "0x8d7d063fb40542c", + "numShards": 1, + "recordsFormat": "avro", + "formatSchemaVersion": 3, + "shardDistFnVersion": 1 + }, + "chunkFilePaths": [ + "$blobchangefeed/log/00/2020/03/25/0200/", + "$blobchangefeed/log/01/2020/03/25/0200/", + "$blobchangefeed/log/02/2020/03/25/0200/" + ], + "storageDiagnostics": { + "version": 0, + "lastModifiedTime": "2020-03-25T02:26:53.186Z", + "data": { + "aid": "61410c64-2006-0001-004c-02cde706e9dc" + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs new file mode 100644 index 0000000000000..7afe32cdbd6d0 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs @@ -0,0 +1,271 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Threading.Tasks;
using Azure.Core;
using Azure.Core.TestFramework;
using Azure.Storage.Blobs.ChangeFeed.Models;
using Azure.Storage.Blobs.Models;
using Moq;
using NUnit.Framework;

namespace Azure.Storage.Blobs.ChangeFeed.Tests
{
    public class SegmentTests : ChangeFeedTestBase
    {
        public SegmentTests(bool async)
            : base(async, null /* RecordedTestMode.Record /* to re-record */)
        {
        }

        /// <summary>
        /// Test building a Segment with a SegmentCursor, and then calling Segment.GetCursor().
        /// </summary>
        [Test]
        public async Task GetCursor()
        {
            // Arrange
            string manifestPath = "idx/segments/2020/03/25/0200/meta.json";

            Mock<BlobContainerClient> containerClient = new Mock<BlobContainerClient>(MockBehavior.Strict);
            Mock<BlobClient> blobClient = new Mock<BlobClient>(MockBehavior.Strict);
            Mock<ShardFactory> shardFactory = new Mock<ShardFactory>(MockBehavior.Strict);

            List<Mock<Shard>> shards = new List<Mock<Shard>>();
            int shardCount = 3;
            for (int i = 0; i < shardCount; i++)
            {
                shards.Add(new Mock<Shard>(MockBehavior.Strict));
            }

            List<ShardCursor> shardCursors = new List<ShardCursor>
            {
                new ShardCursor(1, 2, 3),
                new ShardCursor(4, 5, 6),
                new ShardCursor(7, 8, 9)
            };

            DateTimeOffset dateTime = new DateTimeOffset(2020, 3, 25, 2, 0, 0, TimeSpan.Zero);
            int shardIndex = 1;

            SegmentCursor expectedCursor = new SegmentCursor(
                dateTime,
                shardCursors,
                shardIndex);

            containerClient.Setup(r => r.GetBlobClient(It.IsAny<string>())).Returns(blobClient.Object);

            // The segment manifest is read from a local test resource file.
            using FileStream stream = File.OpenRead(
                $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"SegmentManifest.json"}");
            BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream);
            Response<BlobDownloadInfo> downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200));

            if (IsAsync)
            {
                blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse);
            }
            else
            {
                blobClient.Setup(r => r.Download()).Returns(downloadResponse);
            }

            shardFactory.SetupSequence(r => r.BuildShard(
                It.IsAny<bool>(),
                It.IsAny<string>(),
                It.IsAny<ShardCursor>()))
                .ReturnsAsync(shards[0].Object)
                .ReturnsAsync(shards[1].Object)
                .ReturnsAsync(shards[2].Object);

            for (int i = 0; i < shardCount; i++)
            {
                shards[i].Setup(r => r.GetCursor()).Returns(shardCursors[i]);
            }

            SegmentFactory segmentFactory = new SegmentFactory(
                containerClient.Object,
                shardFactory.Object);
            Segment segment = await segmentFactory.BuildSegment(
                IsAsync,
                manifestPath,
                expectedCursor);

            // Act
            SegmentCursor cursor = segment.GetCursor();

            // Assert
            Assert.AreEqual(expectedCursor.SegmentTime, cursor.SegmentTime);
            Assert.AreEqual(expectedCursor.ShardCursors.Count, cursor.ShardCursors.Count);
            for (int i = 0; i < shardCount; i++)
            {
                Assert.AreEqual(expectedCursor.ShardCursors[i].BlockOffset, cursor.ShardCursors[i].BlockOffset);
                Assert.AreEqual(expectedCursor.ShardCursors[i].ChunkIndex, cursor.ShardCursors[i].ChunkIndex);
                Assert.AreEqual(expectedCursor.ShardCursors[i].EventIndex, cursor.ShardCursors[i].EventIndex);
            }
            Assert.AreEqual(shardIndex, cursor.ShardIndex);

            containerClient.Verify(r => r.GetBlobClient(manifestPath));

            if (IsAsync)
            {
                blobClient.Verify(r => r.DownloadAsync());
            }
            else
            {
                blobClient.Verify(r => r.Download());
            }

            for (int i = 0; i < shards.Count; i++)
            {
                shardFactory.Verify(r => r.BuildShard(
                    IsAsync,
                    $"log/0{i}/2020/03/25/0200/",
                    shardCursors[i]));
            }
        }

        /// <summary>
        /// In this test, the Segment has 3 Shards and 5 total Events.
        /// Shard index 0 and 1 have 2 Events, and Shard index 2 has 1 Event.
        /// We are round-robining the Shards, so we will return the events for
        /// the shards indexes: 0 1 2 0 1.
+ /// + [Test] + public async Task GetPage() + { + // Arrange + string manifestPath = "idx/segments/2020/03/25/0200/meta.json"; + int shardCount = 3; + int eventCount = 5; + + Mock containerClient = new Mock(MockBehavior.Strict); + Mock blobClient = new Mock(MockBehavior.Strict); + Mock shardFactory = new Mock(MockBehavior.Strict); + + List> shards = new List>(); + + for (int i = 0; i < shardCount; i++) + { + shards.Add(new Mock(MockBehavior.Strict)); + } + + List eventIds = new List(); + for (int i = 0; i < eventCount; i++) + { + eventIds.Add(Guid.NewGuid()); + } + + containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); + + using FileStream stream = File.OpenRead( + $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"SegmentManifest.json"}"); + BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); + Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200)); + + if (IsAsync) + { + blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse); + } + else + { + blobClient.Setup(r => r.Download()).Returns(downloadResponse); + } + + shardFactory.SetupSequence(r => r.BuildShard( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .ReturnsAsync(shards[0].Object) + .ReturnsAsync(shards[1].Object) + .ReturnsAsync(shards[2].Object); + + // Set up Shards + shards[0].SetupSequence(r => r.Next(It.IsAny(), default)) + .Returns(Task.FromResult(new BlobChangeFeedEvent + { + Id = eventIds[0] + })) + .Returns(Task.FromResult(new BlobChangeFeedEvent + { + Id = eventIds[3] + })); + + shards[0].SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(false); + + shards[1].SetupSequence(r => r.Next(It.IsAny(), default)) + .Returns(Task.FromResult(new BlobChangeFeedEvent + { + Id = eventIds[1] + })) + .Returns(Task.FromResult(new BlobChangeFeedEvent + { + Id = eventIds[4] + })); + + 
shards[1].SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(false); + + shards[2].Setup(r => r.Next(It.IsAny(), default)) + .Returns(Task.FromResult(new BlobChangeFeedEvent + { + Id = eventIds[2] + })); + + shards[2].Setup(r => r.HasNext()) + .Returns(false); + + SegmentFactory segmentFactory = new SegmentFactory( + containerClient.Object, + shardFactory.Object); + Segment segment = await segmentFactory.BuildSegment( + IsAsync, + manifestPath); + + // Act + List events = await segment.GetPage(IsAsync, 25); + + // Assert + Assert.AreEqual(eventCount, events.Count); + for (int i = 0; i < eventCount; i++) + { + Assert.AreEqual(eventIds[i], events[i].Id); + } + + containerClient.Verify(r => r.GetBlobClient(manifestPath)); + if (IsAsync) + { + blobClient.Verify(r => r.DownloadAsync()); + } + else + { + blobClient.Verify(r => r.Download()); + } + + for (int i = 0; i < shards.Count; i++) + { + shardFactory.Verify(r => r.BuildShard( + IsAsync, + $"log/0{i}/2020/03/25/0200/", + default)); + } + + shards[0].Verify(r => r.Next(IsAsync, default)); + shards[0].Verify(r => r.HasNext()); + shards[1].Verify(r => r.Next(IsAsync, default)); + shards[1].Verify(r => r.HasNext()); + shards[2].Verify(r => r.Next(IsAsync, default)); + shards[2].Verify(r => r.HasNext()); + shards[0].Verify(r => r.Next(IsAsync, default)); + shards[0].Verify(r => r.HasNext()); + shards[1].Verify(r => r.Next(IsAsync, default)); + shards[1].Verify(r => r.HasNext()); + } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTest.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTest.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTest.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end 
of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTestAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTestAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTestAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTests.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTests.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTests.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTestsAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTestsAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTestsAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTests.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTests.json new file mode 100644 index 
0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTests.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTestsAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTestsAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTestsAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTests.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTests.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTests.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTestsAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTestsAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTestsAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git 
a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTests.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTests.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTests.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTestsAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTestsAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTestsAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTest.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTest.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTest.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTestAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTestAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTestAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursor.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursor.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursor.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursorAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursorAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursorAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPage.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPage.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPage.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPageAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPageAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPageAsync.json @@ -0,0 +1,4 @@ +{ + 
"Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTest.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTest.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTest.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTestAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTestAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTestAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTest.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTest.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTest.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTestAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTestAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTestAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": 
[], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYear.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYear.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYear.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYearAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYearAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYearAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTime.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTime.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTime.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTimeAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTimeAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTimeAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_False.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_False.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_False.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_FalseAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_FalseAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_FalseAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_True.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_True.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_True.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_TrueAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_TrueAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_TrueAsync.json @@ -0,0 +1,4 @@ +{ + 
"Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/Next.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/Next.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/Next.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/NextAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/NextAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/NextAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync.json new file mode 100644 index 0000000000000..a04d62f548b2f --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync.json @@ -0,0 +1,418 @@ +{ + "Entries": [ + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90?restype=container", + "RequestMethod": "PUT", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-29b6b9e18beba24ea43664ce40b1b41c-7635994c96806c49-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-blob-public-access": "container", + "x-ms-client-request-id": "37d9033a-5b52-e093-6ae2-6fc8d4b9a256", + "x-ms-date": "Fri, 01 May 2020 21:18:02 GMT", + 
"x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 201, + "ResponseHeaders": { + "Content-Length": "0", + "Date": "Fri, 01 May 2020 21:18:02 GMT", + "ETag": "\u00220x8D7EE152212CB76\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:02 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-client-request-id": "37d9033a-5b52-e093-6ae2-6fc8d4b9a256", + "x-ms-request-id": "b2b67fd9-b01e-0095-06fd-1fa515000000", + "x-ms-version": "2019-12-12" + }, + "ResponseBody": [] + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "PUT", + "RequestHeaders": { + "Authorization": "Sanitized", + "Content-Length": "1024", + "If-None-Match": "*", + "traceparent": "00-8056c19e6e2cbc4f90faa1c4a421cd3d-b1a6adc2fb891f4a-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "154dcba4-b4de-b434-753a-926f59fb8342", + "x-ms-date": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": 
"hunS36L2h2Xh0pWCMKtts3PsZ7irlz59mXeGfwCSvT5VTtkbKk3ZQ3x0VYDGEK\u002B\u002BdbxV6FLpqu6rVUYrcAdlTxRWLeHRUYglEyM8N0K\u002Bt4nHxwlEPdXra6PrcNUHrtN0DwZrb\u002BPN7SQ0L9QvrXOr7J29Y6n\u002BIBE\u002Ba8E95uCyHmVQxyc8Ukel\u002BtrlDw0AvL4wP4M3SUopdu70LQQijwZDgRFv3uZLb5pVF2nPGBADPq4CDadce9QaJ1A5SCVDg2rToo78kN7Y5M/p4TLgWV7d8O9WH4Fe5wU8VbeVEid1EpT/\u002B3aCB63QKinXc5miOc3yXFc27WPodLuIwDsmQtGoc8\u002Bd4V3nWuhBhfVYh3NsmRvqorXPPyQwF4e7\u002BUdxqedXL//KBFhvCwFu9GdukO1HHCYgPJ8c2DSqbcrWLGGh0NwNAFd0opXDepezYEu9woaZdNwhUG1HO1fKH5eNmd6gzSOLYI\u002BV0nqQXMEHaHfp7KpDPLeEgH9vxcUAqYL9NeG5E5MnMmH3BKFikPSrQNLf4TPsSlwmG0ykG5rKlSj3kyfgTlXjwrpJTDgWIu\u002BLxIXyIMhPEeuEkvhGgxbGLG54av3MyvF9\u002Bx54o/4kEPlcGFcV2HRgqIE8W7xdnD6jgh/JnbfyvvmUdSSp1m7tYIL/6Wq/sVT6GMJ7kG41IF/f1SDeWnJBalldgWJPttETTvo6XbPU49HLz4X0di6LXlXFyQFNDgYPvX8RYi/xqmLxaEFz\u002BsKsC/FOEortTfMr0iINiezQxUD0KqAzwoGh2SUjAw7sF2STGb0Q//t8InCL5pAN5CB1IaIBf9AieElwrDllyUnkPru\u002BeW8uwMZEeAgvp9zxlPQEreJs65eTcKF0n1LCFlgnpqjVT2qwRmHbvQfH8tBsfxhEYRxmeFIjKgjOuSB0gWGwCAE0G0WJU2aBkS6QzH7QCCA5frU8wC5VsSRi4oxAZjulF\u002BrPOGPTug215eYkk12K/bH/OgOrdEfS32ogJ3HaQCry6rWTAdC9ZsOohttHMB7G1\u002BTbQCw4\u002BaKi/Fj4spOc26S8Mqio9vzhQtRAA\u002B1siq5LWVCM/gD\u002BuvES8QR3xLMwC6s3iv52HuDo\u002Bf5X\u002BPW0NnKqrVlCD9ykvni3lQX3an4gEPhCNf\u002Bv1TpvowUBpsH7XbuSzBZa7fvPksyGvona8Y5/N\u002Bf9cgBy\u002Btce0mvWohi2vDdKZJ5nxiL8cqIzKRaxlbJ3gm/a/eu6INE/PBqP\u002BPC00AJSkIzwF3DFAf9ePtHBbT25Xx\u002Bo3L5MkETMKIoIbHqxflgTXwQmhmr\u002BFPzrXOH/fs/c1pnQC/REBQBHJO/mvZSFrMB5rQ04k9i1qsfBYTNjzKq7K5Xh4Q==", + "StatusCode": 201, + "ResponseHeaders": { + "Content-Length": "0", + "Content-MD5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "Date": "Fri, 01 May 2020 21:18:02 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-client-request-id": "154dcba4-b4de-b434-753a-926f59fb8342", + "x-ms-content-crc64": "sFHvOXWQOCg=", + "x-ms-request-id": 
"b2b67fff-b01e-0095-28fd-1fa515000000", + "x-ms-request-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": [] + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-07535d5ba97159438e3e5004dd10816d-f099c446f5756e45-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "e5c169c5-71d4-0df2-6f8f-4ae1cebd2aa9", + "x-ms-date": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-range": "bytes=0-156", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 0-156/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "e5c169c5-71d4-0df2-6f8f-4ae1cebd2aa9", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "b2b68008-b01e-0095-30fd-1fa515000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": 
"hunS36L2h2Xh0pWCMKtts3PsZ7irlz59mXeGfwCSvT5VTtkbKk3ZQ3x0VYDGEK\u002B\u002BdbxV6FLpqu6rVUYrcAdlTxRWLeHRUYglEyM8N0K\u002Bt4nHxwlEPdXra6PrcNUHrtN0DwZrb\u002BPN7SQ0L9QvrXOr7J29Y6n\u002BIBE\u002Ba8E95uCyHmVQxyc8Ukel\u002BtrlDw0AvL4wP4M3SUopdu70LQQijw==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-3016bd7d6293504b956a0541cb89c807-3d54089df7699c4c-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "f0a9a604-9d39-d46e-e6ec-17c2d5e44278", + "x-ms-date": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-range": "bytes=157-313", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 157-313/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "f0a9a604-9d39-d46e-e6ec-17c2d5e44278", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "b2b6802f-b01e-0095-55fd-1fa515000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": 
"BkOBEW/e5ktvmlUXac8YEAM\u002BrgINp1x71BonUDlIJUODatOijvyQ3tjkz\u002BnhMuBZXt3w71YfgV7nBTxVt5USJ3USlP/7doIHrdAqKddzmaI5zfJcVzbtY\u002Bh0u4jAOyZC0ahzz53hXeda6EGF9ViHc2yZG\u002Bqitc8/JDAXh7v5R3Gp51cv/8oEWG8LAW70Z26Q7UccJiA8nxzYNKptyg==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-0fe3d3744d71064ea107d1648e7348ac-b7947c7b3fb7d948-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "27dcdfd5-3a14-ab6f-333d-8545f3eaad21", + "x-ms-date": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-range": "bytes=314-470", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 314-470/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "27dcdfd5-3a14-ab6f-333d-8545f3eaad21", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "b2b68038-b01e-0095-5dfd-1fa515000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": 
"1ixhodDcDQBXdKKVw3qXs2BLvcKGmXTcIVBtRztXyh\u002BXjZneoM0ji2CPldJ6kFzBB2h36eyqQzy3hIB/b8XFAKmC/TXhuROTJzJh9wShYpD0q0DS3\u002BEz7EpcJhtMpBuaypUo95Mn4E5V48K6SUw4FiLvi8SF8iDITxHrhJL4RoMWxixueGr9zMrxffseeKP\u002BJBD5XBhXFdh0YKiBPA==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-c252f3489f4e9844a13ff531b9ec3daa-25b062df34267d4a-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "f6eda3df-1b07-e764-9b42-25bc4375c7d2", + "x-ms-date": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-range": "bytes=471-627", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 471-627/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "f6eda3df-1b07-e764-9b42-25bc4375c7d2", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "b2b68049-b01e-0095-6bfe-1fa515000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": 
"W7xdnD6jgh/JnbfyvvmUdSSp1m7tYIL/6Wq/sVT6GMJ7kG41IF/f1SDeWnJBalldgWJPttETTvo6XbPU49HLz4X0di6LXlXFyQFNDgYPvX8RYi/xqmLxaEFz\u002BsKsC/FOEortTfMr0iINiezQxUD0KqAzwoGh2SUjAw7sF2STGb0Q//t8InCL5pAN5CB1IaIBf9AieElwrDllyUnkPg==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-fcc9ad0ca7995f4da7568fee4bbcdead-15d355a94228a74e-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "5066fefa-b7fc-dd19-0189-58ca3bb5f877", + "x-ms-date": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-range": "bytes=628-784", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 628-784/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "5066fefa-b7fc-dd19-0189-58ca3bb5f877", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "b2b6804c-b01e-0095-6efe-1fa515000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": 
"u755by7AxkR4CC\u002Bn3PGU9ASt4mzrl5NwoXSfUsIWWCemqNVParBGYdu9B8fy0Gx/GERhHGZ4UiMqCM65IHSBYbAIATQbRYlTZoGRLpDMftAIIDl\u002BtTzALlWxJGLijEBmO6UX6s84Y9O6DbXl5iSTXYr9sf86A6t0R9LfaiAncdpAKvLqtZMB0L1mw6iG20cwHsbX5NtALDj5oqL8WA==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-6a7127f21ef82743991e88106d432734-2a061e027738f345-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "45c1c755-770e-d7d5-a4c3-2b64c09c39b8", + "x-ms-date": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-range": "bytes=785-941", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 785-941/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "45c1c755-770e-d7d5-a4c3-2b64c09c39b8", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "b2b68057-b01e-0095-79fe-1fa515000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": 
"\u002BLKTnNukvDKoqPb84ULUQAPtbIquS1lQjP4A/rrxEvEEd8SzMAurN4r\u002Bdh7g6Pn\u002BV/j1tDZyqq1ZQg/cpL54t5UF92p\u002BIBD4QjX/r9U6b6MFAabB\u002B127kswWWu37z5LMhr6J2vGOfzfn/XIAcvrXHtJr1qIYtrw3SmSeZ8Yi/HKiMykWsZWyd4Jv2v3ruiDRPzwaj/jwtNACUpCM8A==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-4d8d0c28cb9f104e8dd0061cf69c7b38-32a27dc310b00c4f-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "747431a8-b2f3-af68-8c94-5b71eb3305a0", + "x-ms-date": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-range": "bytes=942-1098", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "82", + "Content-Range": "bytes 942-1023/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "747431a8-b2f3-af68-8c94-5b71eb3305a0", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "b2b6807b-b01e-0095-1cfe-1fa515000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": "F3DFAf9ePtHBbT25Xx\u002Bo3L5MkETMKIoIbHqxflgTXwQmhmr\u002BFPzrXOH/fs/c1pnQC/REBQBHJO/mvZSFrMB5rQ04k9i1qsfBYTNjzKq7K5Xh4Q==" + 
}, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90?restype=container", + "RequestMethod": "DELETE", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-f65c24cb0ab5a644b1cdb308cb000d29-eb5a238f2825e048-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "8fbc69c5-c427-f5c5-5a4e-187291b7f932", + "x-ms-date": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 202, + "ResponseHeaders": { + "Content-Length": "0", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-client-request-id": "8fbc69c5-c427-f5c5-5a4e-187291b7f932", + "x-ms-request-id": "b2b68096-b01e-0095-37fe-1fa515000000", + "x-ms-version": "2019-12-12" + }, + "ResponseBody": [] + } + ], + "Variables": { + "RandomSeed": "792381840", + "Storage_TestConfigDefault": 
"ProductionTenant\nseanmcccanary\nU2FuaXRpemVk\nhttps://seanmcccanary.blob.core.windows.net\nhttps://seanmcccanary.file.core.windows.net\nhttps://seanmcccanary.queue.core.windows.net\nhttps://seanmcccanary.table.core.windows.net\n\n\n\n\nhttps://seanmcccanary-secondary.blob.core.windows.net\nhttps://seanmcccanary-secondary.file.core.windows.net\nhttps://seanmcccanary-secondary.queue.core.windows.net\nhttps://seanmcccanary-secondary.table.core.windows.net\n\nSanitized\n\n\nCloud\nBlobEndpoint=https://seanmcccanary.blob.core.windows.net/;QueueEndpoint=https://seanmcccanary.queue.core.windows.net/;FileEndpoint=https://seanmcccanary.file.core.windows.net/;BlobSecondaryEndpoint=https://seanmcccanary-secondary.blob.core.windows.net/;QueueSecondaryEndpoint=https://seanmcccanary-secondary.queue.core.windows.net/;FileSecondaryEndpoint=https://seanmcccanary-secondary.file.core.windows.net/;AccountName=seanmcccanary;AccountKey=Sanitized\nseanscope1" + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsyncAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsyncAsync.json new file mode 100644 index 0000000000000..3f44471337f09 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsyncAsync.json @@ -0,0 +1,418 @@ +{ + "Entries": [ + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9?restype=container", + "RequestMethod": "PUT", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-ab3ce3ce807b344bb4f46c41c58e4a63-b15c2830fb726b4a-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-blob-public-access": "container", + "x-ms-client-request-id": "3e9eb20a-7b8e-30d1-ba02-498361189a39", + "x-ms-date": 
"Fri, 01 May 2020 21:18:04 GMT", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 201, + "ResponseHeaders": { + "Content-Length": "0", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE1522F2D80E\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-client-request-id": "3e9eb20a-7b8e-30d1-ba02-498361189a39", + "x-ms-request-id": "099de8d5-e01e-0043-14fe-1fabfc000000", + "x-ms-version": "2019-12-12" + }, + "ResponseBody": [] + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "PUT", + "RequestHeaders": { + "Authorization": "Sanitized", + "Content-Length": "1024", + "If-None-Match": "*", + "traceparent": "00-d8a03fb949beb4419d83cde01f21fe05-4c41fdfcc7972a49-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "4d391c40-2c2a-907d-3446-b186ad21ec62", + "x-ms-date": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": 
"Xvph0NiOB\u002BUXICslttuQJWXp6tVol1sN9VHlbLkDFfOR5l7yjlrafKu4J25Qq3nfciQdeC7F0MeeVNuHqZssEwo4lNB9nZcLDm4pr3DtvUnXBvqV5p3FyxWXVDl\u002BU9MQjBNjGBepYdSMEWq/nCFXsZeSr6Khli1tcsm75kldjbqaGSJciaUxLqQlCdQzBS/0P6Ki7KMwb/bm9qVO7nuLLoSvJYL\u002B\u002BifyYIH\u002Be4xZOW2Zx6amuqAXaqosdpP/40AomtpPg\u002BCs9UR7sIwhMvNpjbw4tCbV5HTkl7rY0YSU8n1B1eGyCbaM7NFhKgoU9Ti2wunWxy4ZU7Vma\u002Bsl1CPA5r4O4FwStoMGq8dVc7lpz6KCIQ6bvStOKIIrTJ3KZhitoUNMBRXbpAtl0Btmw9/7/JxJo7IVJSmSwLfa5mrmPEWvqNJ1HcmMpn0CNe0Q52SjME8AgMNlmHwauUVdAb2hfpPWtxsNhd7Svfw1Eu87gthTN7Ya5ryBSQ9VW2SELqXAYbdZOi9JlapxLsPHMIaN3jdsCsoGhpjXVG4Ltyp6TY5WdLA44keoCaEMqbj5uhmJyHwsGCTaudoegeXBPd5/9\u002BA0cLvVVfzvwwAp\u002Bt3g3ppVge6uTTVpe7rVGWBh58vCjLHXxElds3N/PFQjsb/9PqJLbLvw0amqyvQA/zubjtw1Z20rCBpQ9816LhHhRsr62Nq\u002BFnoP0boILflVXLQ45K537kpS0TWI\u002B3TtdqOYyYQqBidh3XhhJZWyI9sg\u002BpsZQkliaVh/qAekNRoGIlsXcGR1K1aN1gY2Fsm0n65aZwDZ1DRZNV8sql0fvKddWwI6m7J3q89FjGtDLFOOyghYSs4aLE0nvTsESYD4bRpe2LrGegT747swk0fIFggMnd9UxnLZQy60Et13cmJOTB9R5FOZyFAEXWYS6ZgGXqcy\u002BKuho6anhZWLEWgmhHBZ4kcndzK1ctWPUGkLxlqVrrQ2RXq2rKh4qV6\u002BDrbo3tj1uAdR3ud1fIXWElM8m7469WO07z72ozWer83T9mQep3GQFg1TRM/Mwcj035pVPA2ggxhTM\u002B6a/xXRsy8ZoYYhuGvhrIWw57GT5y52AsObSXPAM3NLynOD9hb4gZCgsx9C0ycuc4\u002BH75tnwEzAI/xSNmq8WW4S1e34XrVvuGYg8zQOMy\u002BbbNXjOETX5AomwtIZpUom41mwL/06NhXZGDY1R80Yse/Nt8qV2SEqO/Hj0ZvjVRIb\u002BQ/aXQQxYTa/CR8NoZw8DhDiTO6AkQo4hvMMVmR69sMjGk6\u002BPVPrnFWT1c8BHZlUp8OKAAgkmmcEodcQJArSRRJb6YZOftC997CY7Lw6NmhZt6\u002Bg7/W7/3/muZx82g==", + "StatusCode": 201, + "ResponseHeaders": { + "Content-Length": "0", + "Content-MD5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-client-request-id": "4d391c40-2c2a-907d-3446-b186ad21ec62", + "x-ms-content-crc64": "Bps5Y\u002BRozCA=", + "x-ms-request-id": "099de8e2-e01e-0043-1efe-1fabfc000000", + 
"x-ms-request-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": [] + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-ae29abac71f6c14187f12b46be8059b9-43868325c9b26b4c-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "ed8cbe63-3d1f-7f83-4bda-3a6fbbd48434", + "x-ms-date": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-range": "bytes=0-156", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 0-156/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "ed8cbe63-3d1f-7f83-4bda-3a6fbbd48434", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "099de8f4-e01e-0043-30fe-1fabfc000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": 
"Xvph0NiOB\u002BUXICslttuQJWXp6tVol1sN9VHlbLkDFfOR5l7yjlrafKu4J25Qq3nfciQdeC7F0MeeVNuHqZssEwo4lNB9nZcLDm4pr3DtvUnXBvqV5p3FyxWXVDl\u002BU9MQjBNjGBepYdSMEWq/nCFXsZeSr6Khli1tcsm75kldjbqaGSJciaUxLqQlCdQzBS/0P6Ki7KMwb/bm9qVO7g==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-ac9d2951125b6e449407e72923c2d6fb-0507008691aaab48-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "ee23123a-ff95-3743-4dba-a356d3b44e1b", + "x-ms-date": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-range": "bytes=157-313", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 157-313/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "ee23123a-ff95-3743-4dba-a356d3b44e1b", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "099de924-e01e-0043-5efe-1fabfc000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": 
"e4suhK8lgv76J/Jggf57jFk5bZnHpqa6oBdqqix2k//jQCia2k\u002BD4Kz1RHuwjCEy82mNvDi0JtXkdOSXutjRhJTyfUHV4bIJtozs0WEqChT1OLbC6dbHLhlTtWZr6yXUI8Dmvg7gXBK2gwarx1VzuWnPooIhDpu9K04ogitMncpmGK2hQ0wFFdukC2XQG2bD3/v8nEmjshUlKZLAtw==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-773ae73907ab234a8b10fa5dfff6d943-da45c684dc228442-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "a9344e50-95b0-309e-fa3e-e77131ac6043", + "x-ms-date": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-range": "bytes=314-470", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 314-470/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "a9344e50-95b0-309e-fa3e-e77131ac6043", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "099de938-e01e-0043-6ffe-1fabfc000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": 
"2uZq5jxFr6jSdR3JjKZ9AjXtEOdkozBPAIDDZZh8GrlFXQG9oX6T1rcbDYXe0r38NRLvO4LYUze2Gua8gUkPVVtkhC6lwGG3WTovSZWqcS7DxzCGjd43bArKBoaY11RuC7cqek2OVnSwOOJHqAmhDKm4\u002BboZich8LBgk2rnaHoHlwT3ef/fgNHC71VX878MAKfrd4N6aVYHurk01aQ==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-a1d1cd6f819eda48bf0a01984df68d90-8e297298295d814e-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "8406352d-a47f-766a-673e-ce96ee005c0f", + "x-ms-date": "Fri, 01 May 2020 21:18:05 GMT", + "x-ms-range": "bytes=471-627", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 471-627/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "8406352d-a47f-766a-673e-ce96ee005c0f", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "099de946-e01e-0043-7dfe-1fabfc000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": 
"e7rVGWBh58vCjLHXxElds3N/PFQjsb/9PqJLbLvw0amqyvQA/zubjtw1Z20rCBpQ9816LhHhRsr62Nq\u002BFnoP0boILflVXLQ45K537kpS0TWI\u002B3TtdqOYyYQqBidh3XhhJZWyI9sg\u002BpsZQkliaVh/qAekNRoGIlsXcGR1K1aN1gY2Fsm0n65aZwDZ1DRZNV8sql0fvKddWwI6m7J3qw==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-6a755e782694a04d8a1ec8449e3118bb-9a085cd6c135f543-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "fc4482e6-dca6-3bc3-d817-842ff380759f", + "x-ms-date": "Fri, 01 May 2020 21:18:05 GMT", + "x-ms-range": "bytes=628-784", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 628-784/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "fc4482e6-dca6-3bc3-d817-842ff380759f", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "099de94f-e01e-0043-04fe-1fabfc000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": 
"z0WMa0MsU47KCFhKzhosTSe9OwRJgPhtGl7YusZ6BPvjuzCTR8gWCAyd31TGctlDLrQS3XdyYk5MH1HkU5nIUARdZhLpmAZepzL4q6GjpqeFlYsRaCaEcFniRyd3MrVy1Y9QaQvGWpWutDZFerasqHipXr4Otuje2PW4B1He53V8hdYSUzybvjr1Y7TvPvajNZ6vzdP2ZB6ncZAWDQ==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-34b2384957375641ba7af5a5ff753d0a-7ecdbd2fd7028041-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "6baf2718-5fbb-1b69-eed3-c3431cd19bb7", + "x-ms-date": "Fri, 01 May 2020 21:18:05 GMT", + "x-ms-range": "bytes=785-941", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 785-941/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "6baf2718-5fbb-1b69-eed3-c3431cd19bb7", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "099de96c-e01e-0043-20fe-1fabfc000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": 
"U0TPzMHI9N\u002BaVTwNoIMYUzPumv8V0bMvGaGGIbhr4ayFsOexk\u002BcudgLDm0lzwDNzS8pzg/YW\u002BIGQoLMfQtMnLnOPh\u002B\u002BbZ8BMwCP8UjZqvFluEtXt\u002BF61b7hmIPM0DjMvm2zV4zhE1\u002BQKJsLSGaVKJuNZsC/9OjYV2Rg2NUfNGLHvzbfKldkhKjvx49Gb41USG/kP2l0EMWE2vwkfDQ==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-75be2878e805fc47b8f43ddbed4243c4-d1f6d6a46b935b4a-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "cb178003-09d0-d9e4-b928-6a70f8c76e64", + "x-ms-date": "Fri, 01 May 2020 21:18:05 GMT", + "x-ms-range": "bytes=942-1098", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "82", + "Content-Range": "bytes 942-1023/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "cb178003-09d0-d9e4-b928-6a70f8c76e64", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "099de979-e01e-0043-2bfe-1fabfc000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": 
"oZw8DhDiTO6AkQo4hvMMVmR69sMjGk6\u002BPVPrnFWT1c8BHZlUp8OKAAgkmmcEodcQJArSRRJb6YZOftC997CY7Lw6NmhZt6\u002Bg7/W7/3/muZx82g==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9?restype=container", + "RequestMethod": "DELETE", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-c58a271797ebe24ab7e134cf5772365d-d804b1efc6df294e-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "9c3f3a33-cfb9-50e6-8508-86f663f231b1", + "x-ms-date": "Fri, 01 May 2020 21:18:05 GMT", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 202, + "ResponseHeaders": { + "Content-Length": "0", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-client-request-id": "9c3f3a33-cfb9-50e6-8508-86f663f231b1", + "x-ms-request-id": "099de983-e01e-0043-34fe-1fabfc000000", + "x-ms-version": "2019-12-12" + }, + "ResponseBody": [] + } + ], + "Variables": { + "RandomSeed": "1584867388", + "Storage_TestConfigDefault": 
"ProductionTenant\nseanmcccanary\nU2FuaXRpemVk\nhttps://seanmcccanary.blob.core.windows.net\nhttps://seanmcccanary.file.core.windows.net\nhttps://seanmcccanary.queue.core.windows.net\nhttps://seanmcccanary.table.core.windows.net\n\n\n\n\nhttps://seanmcccanary-secondary.blob.core.windows.net\nhttps://seanmcccanary-secondary.file.core.windows.net\nhttps://seanmcccanary-secondary.queue.core.windows.net\nhttps://seanmcccanary-secondary.table.core.windows.net\n\nSanitized\n\n\nCloud\nBlobEndpoint=https://seanmcccanary.blob.core.windows.net/;QueueEndpoint=https://seanmcccanary.queue.core.windows.net/;FileEndpoint=https://seanmcccanary.file.core.windows.net/;BlobSecondaryEndpoint=https://seanmcccanary-secondary.blob.core.windows.net/;QueueSecondaryEndpoint=https://seanmcccanary-secondary.queue.core.windows.net/;FileSecondaryEndpoint=https://seanmcccanary-secondary.file.core.windows.net/;AccountName=seanmcccanary;AccountKey=Sanitized\nseanscope1" + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTests.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTests.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTests.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTestsAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTestsAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTestsAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursor.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursor.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursor.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursorAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursorAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursorAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPage.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPage.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPage.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPageAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPageAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPageAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + 
"Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_False.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_False.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_False.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_FalseAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_FalseAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_FalseAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalized.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalized.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalized.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalizedAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalizedAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalizedAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git 
a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursor.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursor.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursor.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursorAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursorAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursorAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeft.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeft.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeft.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeftAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeftAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeftAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNext.json 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNext.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNext.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNextAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNextAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNextAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_False.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_False.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_False.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_FalseAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_FalseAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_FalseAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizes.json 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizes.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizes.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizesAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizesAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizesAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/Next.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/Next.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/Next.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/NextAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/NextAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/NextAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs new file mode 100644 index 0000000000000..1a7398f3000ae --- /dev/null +++ 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs @@ -0,0 +1,613 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; +using Azure.Storage.Blobs.ChangeFeed.Models; +using Moq; +using NUnit.Framework; +using System.Threading.Tasks; +using Azure.Storage.Blobs.Models; +using Azure.Core; + +namespace Azure.Storage.Blobs.ChangeFeed.Tests +{ + public class ShardTests : ChangeFeedTestBase + { + public ShardTests(bool async) + : base(async, null /* RecordedTestMode.Record /* to re-record */) + { + } + + /// + /// Tests creating a Shard with a ShardCursor, and then calling Shard.GetCursor(). + /// + [Test] + public async Task GetCursor() + { + // Arrange + string shardPath = "shardPath"; + long chunkIndex = 2; + long blockOffset = 100; + long eventIndex = 200; + + ShardCursor shardCursor = new ShardCursor( + chunkIndex, + blockOffset, + eventIndex); + + Mock containerClient = new Mock(MockBehavior.Strict); + Mock chunkFactory = new Mock(MockBehavior.Strict); + Mock chunk = new Mock(MockBehavior.Strict); + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetChunkPagesFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + It.IsAny(), + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = PageResponseEnumerator.CreateEnumerable(GetChunkPagesFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + default, + It.IsAny(), + default)).Returns(pageable); + } + + chunkFactory.Setup(r => r.BuildChunk( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(chunk.Object); + + chunk.Setup(r => r.BlockOffset).Returns(blockOffset); + chunk.Setup(r => r.EventIndex).Returns(eventIndex); + + ShardFactory shardFactory = new ShardFactory( + containerClient.Object, + chunkFactory.Object); + + // Act + Shard shard = await 
shardFactory.BuildShard( + IsAsync, + shardPath, + shardCursor) + .ConfigureAwait(false); + + + ShardCursor cursor = shard.GetCursor(); + + // Assert + Assert.AreEqual(chunkIndex, cursor.ChunkIndex); + Assert.AreEqual(blockOffset, cursor.BlockOffset); + Assert.AreEqual(eventIndex, cursor.EventIndex); + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + shardPath, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + shardPath, + default)); + } + + chunkFactory.Verify(r => r.BuildChunk( + "chunk2", + blockOffset, + eventIndex)); + + chunk.Verify(r => r.BlockOffset); + chunk.Verify(r => r.EventIndex); + } + + /// + /// Tests Shard.HasNext(). + /// + [Test] + public async Task HasNext_False() + { + // Arrange + string shardPath = "shardPath"; + long chunkIndex = 5; + long blockOffset = 100; + long eventIndex = 200; + + ShardCursor shardCursor = new ShardCursor( + chunkIndex, + blockOffset, + eventIndex); + + Mock containerClient = new Mock(MockBehavior.Strict); + Mock chunkFactory = new Mock(MockBehavior.Strict); + Mock chunk = new Mock(MockBehavior.Strict); + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetChunkPagesFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + It.IsAny(), + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = PageResponseEnumerator.CreateEnumerable(GetChunkPagesFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + default, + It.IsAny(), + default)).Returns(pageable); + } + + chunkFactory.Setup(r => r.BuildChunk( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(chunk.Object); + + chunk.Setup(r => r.HasNext()).Returns(false); + + ShardFactory shardFactory = new ShardFactory( + containerClient.Object, + chunkFactory.Object); + + // Act + Shard shard = await 
shardFactory.BuildShard( + IsAsync, + shardPath, + shardCursor) + .ConfigureAwait(false); + + bool hasNext = shard.HasNext(); + + // Assert + Assert.IsFalse(hasNext); + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + shardPath, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + shardPath, + default)); + } + + chunkFactory.Verify(r => r.BuildChunk( + "chunk5", + blockOffset, + eventIndex)); + + chunk.Verify(r => r.HasNext()); + } + + /// + /// Tests Shard.HasNext(). + /// + [Test] + public async Task HasNext_ChunksLeft() + { + // Arrange + string shardPath = "shardPath"; + long chunkIndex = 2; + long blockOffset = 100; + long eventIndex = 200; + + ShardCursor shardCursor = new ShardCursor( + chunkIndex, + blockOffset, + eventIndex); + + Mock containerClient = new Mock(MockBehavior.Strict); + Mock chunkFactory = new Mock(MockBehavior.Strict); + Mock chunk = new Mock(MockBehavior.Strict); + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetChunkPagesFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + It.IsAny(), + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = PageResponseEnumerator.CreateEnumerable(GetChunkPagesFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + default, + It.IsAny(), + default)).Returns(pageable); + } + + chunkFactory.Setup(r => r.BuildChunk( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(chunk.Object); + + ShardFactory shardFactory = new ShardFactory( + containerClient.Object, + chunkFactory.Object); + + // Act + Shard shard = await shardFactory.BuildShard( + IsAsync, + shardPath, + shardCursor) + .ConfigureAwait(false); + + bool hasNext = shard.HasNext(); + + // Assert + Assert.IsTrue(hasNext); + + if (IsAsync) + { + containerClient.Verify(r => 
r.GetBlobsByHierarchyAsync( + default, + default, + default, + shardPath, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + shardPath, + default)); + } + + chunkFactory.Verify(r => r.BuildChunk( + "chunk2", + blockOffset, + eventIndex)); + } + + /// + /// Tests Shard.HasNext(). + /// + [Test] + public async Task HasNext_CurrentChunkHasNext() + { + // Arrange + string shardPath = "shardPath"; + long chunkIndex = 5; + long blockOffset = 100; + long eventIndex = 200; + + ShardCursor shardCursor = new ShardCursor( + chunkIndex, + blockOffset, + eventIndex); + + Mock containerClient = new Mock(MockBehavior.Strict); + Mock chunkFactory = new Mock(MockBehavior.Strict); + Mock chunk = new Mock(MockBehavior.Strict); + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetChunkPagesFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + It.IsAny(), + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = PageResponseEnumerator.CreateEnumerable(GetChunkPagesFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + default, + It.IsAny(), + default)).Returns(pageable); + } + + chunkFactory.Setup(r => r.BuildChunk( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(chunk.Object); + + chunk.Setup(r => r.HasNext()).Returns(true); + + ShardFactory shardFactory = new ShardFactory( + containerClient.Object, + chunkFactory.Object); + + // Act + Shard shard = await shardFactory.BuildShard( + IsAsync, + shardPath, + shardCursor) + .ConfigureAwait(false); + + bool hasNext = shard.HasNext(); + + // Assert + Assert.IsTrue(hasNext); + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + shardPath, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + shardPath, + 
default)); + } + + chunkFactory.Verify(r => r.BuildChunk( + "chunk5", + blockOffset, + eventIndex)); + + chunk.Verify(r => r.HasNext()); + } + + /// + /// In this test, the Shard has 4 Chunks with 2 Events in each Chunk. + /// We call ShardFactory.BuildShard() with a ShardCursor, to create the Shard, + /// Shard.Next() 4 times, Shard.GetCursor(), and then Shard.Next 4 times. + /// + [Test] + public async Task Next() + { + // Arrange + int chunkCount = 4; + int eventCount = 8; + Mock containerClient = new Mock(MockBehavior.Strict); + Mock chunkFactory = new Mock(MockBehavior.Strict); + List> chunks = new List>(); + + List expectedChangeFeedEvents = new List(); + for (int i = 0; i < eventCount; i++) + { + chunks.Add(new Mock(MockBehavior.Strict)); + expectedChangeFeedEvents.Add(new BlobChangeFeedEvent + { + Id = Guid.NewGuid() + }); + } + + string shardPath = "shardPath"; + long chunkIndex = 2; + long blockOffset = 100; + long eventIndex = 200; + + ShardCursor shardCursor = new ShardCursor( + chunkIndex, + blockOffset, + eventIndex); + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetChunkPagesFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + It.IsAny(), + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = PageResponseEnumerator.CreateEnumerable(GetChunkPagesFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + default, + It.IsAny(), + default)).Returns(pageable); + } + + chunkFactory.SetupSequence(r => r.BuildChunk( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(chunks[0].Object) + .Returns(chunks[1].Object) + .Returns(chunks[2].Object) + .Returns(chunks[3].Object); + + chunks[0].SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(false); + + chunks[1].SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(false); + + chunks[2].SetupSequence(r => r.HasNext()) + .Returns(true) + 
.Returns(false); + + chunks[3].SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(true) + .Returns(true) + .Returns(false); + + for (int i = 0; i < chunkCount; i++) + { + + chunks[i].SetupSequence(r => r.Next( + It.IsAny(), + default)) + .Returns(Task.FromResult(expectedChangeFeedEvents[2 * i])) + .Returns(Task.FromResult(expectedChangeFeedEvents[2 * i + 1])); + } + + chunks[2].Setup(r => r.BlockOffset).Returns(blockOffset); + chunks[2].Setup(r => r.EventIndex).Returns(eventIndex); + + ShardFactory shardFactory = new ShardFactory( + containerClient.Object, + chunkFactory.Object); + + // Act + Shard shard = await shardFactory.BuildShard( + IsAsync, + shardPath, + shardCursor) + .ConfigureAwait(false); + + List changeFeedEvents = new List(); + for (int i = 0; i < 4; i++) + { + changeFeedEvents.Add(await shard.Next(IsAsync)); + } + ShardCursor cursor = shard.GetCursor(); + for (int i = 0; i < 4; i++) + { + changeFeedEvents.Add(await shard.Next(IsAsync)); + } + + // Assert + for (int i = 0; i < eventCount; i++) + { + Assert.AreEqual(expectedChangeFeedEvents[i].Id, changeFeedEvents[i].Id); + } + + Assert.AreEqual(4, cursor.ChunkIndex); + Assert.AreEqual(eventIndex, cursor.EventIndex); + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + shardPath, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + shardPath, + default)); + } + + chunkFactory.Verify(r => r.BuildChunk( + "chunk2", + blockOffset, + eventIndex)); + chunkFactory.Verify(r => r.BuildChunk( + "chunk3", + default, + default)); + chunkFactory.Verify(r => r.BuildChunk( + "chunk4", + default, + default)); + chunkFactory.Verify(r => r.BuildChunk( + "chunk5", + default, + default)); + + for (int i = 0; i < chunkCount; i++) + { + chunks[i].Verify(r => r.Next(IsAsync, default), Times.Exactly(2)); + } + + chunks[0].Verify(r => r.HasNext(), Times.Exactly(2)); + chunks[1].Verify(r => 
r.HasNext(), Times.Exactly(2)); + chunks[2].Verify(r => r.HasNext(), Times.Exactly(2)); + chunks[3].Verify(r => r.HasNext(), Times.Exactly(4)); + + chunks[2].Verify(r => r.BlockOffset); + chunks[2].Verify(r => r.EventIndex); + } + + private static Task> GetChunkPagesFuncAsync( + string continuation, + int? pageSizeHint) + => Task.FromResult(GetChunkPagesFunc(continuation, pageSizeHint)); + + private static Page GetChunkPagesFunc( + string continuation, + int? pageSizeHint) + => new BlobHierarchyItemPage(new List + { + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("chunk0", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("chunk1", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("chunk2", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("chunk3", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("chunk4", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("chunk5", false, null)) + }); + } +} diff --git a/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs b/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs index c971be7e6bfac..a6ae343f0fc58 100644 --- a/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs +++ b/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs @@ -302,6 +302,52 @@ internal static class Queue public const string UriSubDomain = "queue"; } + /// + /// ChangeFeed constant values. 
+ /// + internal static class ChangeFeed + { + public const string ChangeFeedContainerName = "$blobchangefeed"; + public const string SegmentPrefix = "idx/segments/"; + public const string InitalizationManifestPath = "/0000/"; + public const string InitalizationSegment = "1601"; + public const string MetaSegmentsPath = "meta/segments.json"; + public const long ChunkBlockDownloadSize = MB; + public const int DefaultPageSize = 5000; + + internal static class Event + { + public const string Topic = "topic"; + public const string Subject = "subject"; + public const string EventType = "eventType"; + public const string EventTime = "eventTime"; + public const string EventId = "id"; + public const string Data = "data"; + public const string DataVersion = "dataVersion"; + public const string MetadataVersion = "metadataVersion"; + } + + internal static class EventData + { + public const string Api = "api"; + public const string ClientRequestId = "clientRequestId"; + public const string RequestId = "requestId"; + public const string Etag = "etag"; + public const string ContentType = "contentType"; + public const string ContentLength = "contentLength"; + public const string BlobType = "blobType"; + public const string BlockBlob = "BlockBlob"; + public const string PageBlob = "pageBlob"; + public const string AppendBlob = "AppendBlob"; + public const string ContentOffset = "contentOffset"; + public const string DestinationUrl = "destinationUrl"; + public const string SourceUrl = "sourceUrl"; + public const string Url = "url"; + public const string Recursive = "recursive"; + public const string Sequencer = "sequencer"; + } + } + /// /// Quick Query constant values. 
/// diff --git a/sdk/storage/Azure.Storage.Internal.Avro/src/AvroReader.cs b/sdk/storage/Azure.Storage.Internal.Avro/src/AvroReader.cs index 089cb0c264276..e180a2066017c 100644 --- a/sdk/storage/Azure.Storage.Internal.Avro/src/AvroReader.cs +++ b/sdk/storage/Azure.Storage.Internal.Avro/src/AvroReader.cs @@ -47,13 +47,13 @@ internal class AvroReader /// The byte offset within the Avro file (both header and data) /// of the start of the current block. /// - public long BlockOffset { get; private set; } + public virtual long BlockOffset { get; private set; } /// /// The index of the current object within the current block. /// /// - public long ObjectIndex { get; private set; } + public virtual long ObjectIndex { get; private set; } /// /// If this Avro Reader has been initalized. @@ -91,6 +91,11 @@ public AvroReader( _initalized = false; } + /// + /// Constructor for mocking. Do not use. + /// + public AvroReader() { } + private async Task Initalize(bool async, CancellationToken cancellationToken = default) { // Four bytes, ASCII 'O', 'b', 'j', followed by 1. @@ -141,9 +146,9 @@ private async Task Initalize(bool async, CancellationToken cancellationToken = d } } - public bool HasNext() => !_initalized || _itemsRemainingInBlock > 0; + public virtual bool HasNext() => !_initalized || _itemsRemainingInBlock > 0; - public async Task Next(bool async, CancellationToken cancellationToken = default) + public virtual async Task Next(bool async, CancellationToken cancellationToken = default) { // Initialize AvroReader, if necessary. 
if (!_initalized) diff --git a/sdk/storage/Azure.Storage.sln b/sdk/storage/Azure.Storage.sln index 7ede30d29124e..324b3b70582a3 100644 --- a/sdk/storage/Azure.Storage.sln +++ b/sdk/storage/Azure.Storage.sln @@ -119,6 +119,12 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.Storage.Internal.Avro EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.Storage.Internal.Avro.Tests", "Azure.Storage.Internal.Avro\tests\Azure.Storage.Internal.Avro.Tests.csproj", "{A7FEC0AC-9A90-4F12-A260-B0B63E57D9DA}" EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.Storage.Blobs.ChangeFeed", "Azure.Storage.Blobs.ChangeFeed\src\Azure.Storage.Blobs.ChangeFeed.csproj", "{5EA89BEF-6367-41DD-A10F-246E0D3FDA55}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.Storage.Blobs.ChangeFeed.Tests", "Azure.Storage.Blobs.ChangeFeed\tests\Azure.Storage.Blobs.ChangeFeed.Tests.csproj", "{0780564C-4096-45B4-8DEF-132EE7CB2CF8}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.Storage.Blobs.ChangeFeed.Samples.Tests", "Azure.Storage.Blobs.ChangeFeed\samples\Azure.Storage.Blobs.ChangeFeed.Samples.Tests.csproj", "{5F7C7873-0E11-468C-8045-5163B068FC16}" +EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Azure.Core.TestFramework", "..\core\Azure.Core.TestFramework\src\Azure.Core.TestFramework.csproj", "{23B3D5C8-3160-4BD6-8B25-0D33C98ABE70}" EndProject Global @@ -223,6 +229,18 @@ Global {A7FEC0AC-9A90-4F12-A260-B0B63E57D9DA}.Debug|Any CPU.Build.0 = Debug|Any CPU {A7FEC0AC-9A90-4F12-A260-B0B63E57D9DA}.Release|Any CPU.ActiveCfg = Release|Any CPU {A7FEC0AC-9A90-4F12-A260-B0B63E57D9DA}.Release|Any CPU.Build.0 = Release|Any CPU + {5EA89BEF-6367-41DD-A10F-246E0D3FDA55}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5EA89BEF-6367-41DD-A10F-246E0D3FDA55}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5EA89BEF-6367-41DD-A10F-246E0D3FDA55}.Release|Any CPU.ActiveCfg = Release|Any CPU + 
{5EA89BEF-6367-41DD-A10F-246E0D3FDA55}.Release|Any CPU.Build.0 = Release|Any CPU + {0780564C-4096-45B4-8DEF-132EE7CB2CF8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0780564C-4096-45B4-8DEF-132EE7CB2CF8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0780564C-4096-45B4-8DEF-132EE7CB2CF8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0780564C-4096-45B4-8DEF-132EE7CB2CF8}.Release|Any CPU.Build.0 = Release|Any CPU + {5F7C7873-0E11-468C-8045-5163B068FC16}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5F7C7873-0E11-468C-8045-5163B068FC16}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5F7C7873-0E11-468C-8045-5163B068FC16}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5F7C7873-0E11-468C-8045-5163B068FC16}.Release|Any CPU.Build.0 = Release|Any CPU {23B3D5C8-3160-4BD6-8B25-0D33C98ABE70}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {23B3D5C8-3160-4BD6-8B25-0D33C98ABE70}.Debug|Any CPU.Build.0 = Debug|Any CPU {23B3D5C8-3160-4BD6-8B25-0D33C98ABE70}.Release|Any CPU.ActiveCfg = Release|Any CPU