From 029bf31ec62a059881ea47e59536d0050105221f Mon Sep 17 00:00:00 2001
From: Sean McCullough <44180881+seanmcc-msft@users.noreply.github.com>
Date: Wed, 3 Jun 2020 16:24:45 -0700
Subject: [PATCH] Added Change Feed (#11692)
---
eng/Packages.Data.props | 1 +
.../BreakingChanges.txt | 6 +
.../CHANGELOG.md | 4 +
.../Azure.Storage.Blobs.ChangeFeed/README.md | 148 +++
...rage.Blobs.ChangeFeed.Samples.Tests.csproj | 31 +
.../samples/README.md | 15 +
.../samples/Sample01a_HelloWorld.cs | 115 ++
.../samples/Sample01b_HelloWorldAsync.cs | 121 ++
.../src/AssemblyInfo.cs | 13 +
.../src/AvroReaderFactory.cs | 29 +
.../src/Azure.Storage.Blobs.ChangeFeed.csproj | 48 +
.../src/BlobChangeFeedAsyncPagable.cs | 89 ++
.../src/BlobChangeFeedClient.cs | 219 ++++
.../src/BlobChangeFeedExtensions.cs | 168 +++
.../src/BlobChangeFeedPagable.cs | 80 ++
.../src/ChangeFeed.cs | 256 ++++
.../src/ChangeFeedFactory.cs | 204 ++++
.../src/Chunk.cs | 69 ++
.../src/ChunkFactory.cs | 70 ++
.../src/LazyLoadingBlobStream.cs | 246 ++++
.../src/LazyLoadingBlobStreamFactory.cs | 21 +
.../src/Models/BlobChangeFeedEvent.cs | 93 ++
.../src/Models/BlobChangeFeedEventData.cs | 129 ++
.../src/Models/BlobChangeFeedEventPage.cs | 30 +
.../src/Models/BlobChangeFeedEventType.cs | 21 +
.../src/Models/BlobChangeFeedModelFactory.cs | 72 ++
.../src/Models/ChangeFeedCursor.cs | 48 +
.../src/Models/SegmentCursor.cs | 42 +
.../src/Models/ShardCursor.cs | 44 +
.../src/Segment.cs | 131 +++
.../src/SegmentFactory.cs | 94 ++
.../src/Shard.cs | 103 ++
.../src/ShardFactory.cs | 93 ++
...zure.Storage.Blobs.ChangeFeed.Tests.csproj | 25 +
.../tests/BlobChangeFeedAsyncPagableTests.cs | 89 ++
.../tests/BlobChangeFeedExtensionsTests.cs | 157 +++
.../tests/BlobChangeFeedPagableTests.cs | 35 +
.../tests/ChangeFeedFactoryTests.cs | 68 ++
.../tests/ChangeFeedTestBase.cs | 173 +++
.../tests/ChangeFeedTests.cs | 1037 +++++++++++++++++
.../tests/ChunkTests.cs | 266 +++++
.../tests/LazyLoadingBlobStreamTests.cs | 98 ++
.../tests/Resources/ChangeFeedManifest.json | 12 +
.../tests/Resources/SegmentManifest.json | 26 +
.../tests/SegmentTests.cs | 271 +++++
.../GetSegmentsInYearTest.json | 4 +
.../GetSegmentsInYearTestAsync.json | 4 +
.../RoundDownToNearestHourTests.json | 4 +
.../RoundDownToNearestHourTestsAsync.json | 4 +
.../RoundDownToNearestYearTests.json | 4 +
.../RoundDownToNearestYearTestsAsync.json | 4 +
.../RoundUpToNearestHourTests.json | 4 +
.../RoundUpToNearestHourTestsAsync.json | 4 +
.../ToDateTimeOffsetTests.json | 4 +
.../ToDateTimeOffsetTestsAsync.json | 4 +
.../GetYearPathsTest.json | 4 +
.../GetYearPathsTestAsync.json | 4 +
.../ChangeFeedTests/GetCursor.json | 4 +
.../ChangeFeedTests/GetCursorAsync.json | 4 +
.../ChangeFeedTests/GetPage.json | 4 +
.../ChangeFeedTests/GetPageAsync.json | 4 +
.../GetSegmentsInYearTest.json | 4 +
.../GetSegmentsInYearTestAsync.json | 4 +
.../ChangeFeedTests/GetYearPathsTest.json | 4 +
.../GetYearPathsTestAsync.json | 4 +
.../NoSegmentsRemainingInStartYear.json | 4 +
.../NoSegmentsRemainingInStartYearAsync.json | 4 +
.../NoYearsAfterStartTime.json | 4 +
.../NoYearsAfterStartTimeAsync.json | 4 +
.../ChunkTests/HasNext_False.json | 4 +
.../ChunkTests/HasNext_FalseAsync.json | 4 +
.../ChunkTests/HasNext_True.json | 4 +
.../ChunkTests/HasNext_TrueAsync.json | 4 +
.../tests/SessionRecords/ChunkTests/Next.json | 4 +
.../SessionRecords/ChunkTests/NextAsync.json | 4 +
.../LazyLoadingBlobStreamTests/ReadAsync.json | 418 +++++++
.../ReadAsyncAsync.json | 418 +++++++
.../ReadAsync_InvalidParameterTests.json | 4 +
.../ReadAsync_InvalidParameterTestsAsync.json | 4 +
.../SegmentTests/GetCursor.json | 4 +
.../SegmentTests/GetCursorAsync.json | 4 +
.../SessionRecords/SegmentTests/GetPage.json | 4 +
.../SegmentTests/GetPageAsync.json | 4 +
.../SegmentTests/HasNext_False.json | 4 +
.../SegmentTests/HasNext_FalseAsync.json | 4 +
.../SegmentTests/HasNext_NotInitalized.json | 4 +
.../HasNext_NotInitalizedAsync.json | 4 +
.../SessionRecords/ShardTests/GetCursor.json | 4 +
.../ShardTests/GetCursorAsync.json | 4 +
.../ShardTests/HasNext_ChunksLeft.json | 4 +
.../ShardTests/HasNext_ChunksLeftAsync.json | 4 +
.../HasNext_CurrentChunkHasNext.json | 4 +
.../HasNext_CurrentChunkHasNextAsync.json | 4 +
.../ShardTests/HasNext_False.json | 4 +
.../ShardTests/HasNext_FalseAsync.json | 4 +
.../ShardTests/HasNext_NotInitalizes.json | 4 +
.../HasNext_NotInitalizesAsync.json | 4 +
.../tests/SessionRecords/ShardTests/Next.json | 4 +
.../SessionRecords/ShardTests/NextAsync.json | 4 +
.../tests/ShardTests.cs | 613 ++++++++++
.../src/Shared/Constants.cs | 46 +
.../src/AvroReader.cs | 13 +-
sdk/storage/Azure.Storage.sln | 18 +
103 files changed, 6840 insertions(+), 4 deletions(-)
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/BreakingChanges.txt
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/CHANGELOG.md
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Azure.Storage.Blobs.ChangeFeed.Samples.Tests.csproj
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/README.md
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AssemblyInfo.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AvroReaderFactory.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Azure.Storage.Blobs.ChangeFeed.csproj
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedClient.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStreamFactory.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventType.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ChangeFeedCursor.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/SegmentCursor.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ShardCursor.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Azure.Storage.Blobs.ChangeFeed.Tests.csproj
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/LazyLoadingBlobStreamTests.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/ChangeFeedManifest.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/SegmentManifest.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTest.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTestAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTests.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTestsAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTests.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTestsAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTests.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTestsAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTests.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTestsAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTest.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTestAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursor.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursorAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPage.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPageAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTest.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTestAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTest.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTestAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYear.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYearAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTime.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTimeAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_False.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_FalseAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_True.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_TrueAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/Next.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/NextAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsyncAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTests.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTestsAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursor.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursorAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPage.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPageAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_False.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_FalseAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalized.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalizedAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursor.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursorAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeft.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeftAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNext.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNextAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_False.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_FalseAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizes.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizesAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/Next.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/NextAsync.json
create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs
diff --git a/eng/Packages.Data.props b/eng/Packages.Data.props
index e0cda5e3bf864..d47963fa2612c 100644
--- a/eng/Packages.Data.props
+++ b/eng/Packages.Data.props
@@ -23,6 +23,7 @@
+
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/BreakingChanges.txt b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/BreakingChanges.txt
new file mode 100644
index 0000000000000..6717353356a1c
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/BreakingChanges.txt
@@ -0,0 +1,6 @@
+Breaking Changes
+================
+
+12.0.0-preview.1
+--------------------------
+- New Azure.Storage.Blobs.ChangeFeed client library.
\ No newline at end of file
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/CHANGELOG.md b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/CHANGELOG.md
new file mode 100644
index 0000000000000..de3267ebd7456
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/CHANGELOG.md
@@ -0,0 +1,4 @@
+# Release History
+
+## 12.0.0-preview.1
+This preview is the first release supporting Azure Storage Blobs Change Feed.
\ No newline at end of file
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md
new file mode 100644
index 0000000000000..4e81313d00f72
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md
@@ -0,0 +1,148 @@
+# Azure Storage Blobs Change Feed client library for .NET
+
+> Server Version: 2019-12-12
+
+The purpose of the change feed is to provide transaction logs of all the changes that occur to
+the blobs and the blob metadata in your storage account. The change feed provides an ordered,
+guaranteed, durable, immutable, read-only log of these changes. Client applications can read these
+logs at any time. The change feed enables you to build efficient and scalable solutions that
+process change events that occur in your Blob Storage account at a low cost.
+
+[Source code][source] | [Product documentation][product_docs]
+
+## Getting started
+
+### Install the package
+- TODO after we have released.
+
+### Prerequisites
+
+You need an [Azure subscription][azure_sub] and a
+[Storage Account][storage_account_docs] to use this package.
+
+To create a new Storage Account, you can use the [Azure Portal][storage_account_create_portal],
+[Azure PowerShell][storage_account_create_ps], or the [Azure CLI][storage_account_create_cli].
+Here's an example using the Azure CLI:
+
+```Powershell
+az storage account create --name MyStorageAccount --resource-group MyResourceGroup --location westus --sku Standard_LRS
+```
+
+### Authenticate the Client
+
+Authentication works the same as in [Azure.Storage.Blobs][authenticating_with_blobs].
+
+## Key concepts
+
+The change feed is stored as blobs in a special container in your storage account at standard blob
+pricing cost. You can control the retention period of these files based on your requirements
+(See the conditions of the current release). Change events are appended to the change feed as records
+in the Apache Avro format specification: a compact, fast, binary format that provides rich data structures
+with inline schema. This format is widely used in the Hadoop ecosystem, Stream Analytics, and Azure Data
+Factory.
+
+You can process these logs incrementally or in-full. Any number of client applications can independently
+read the change feed, in parallel, and at their own pace. Analytics applications such as Apache Drill or
+Apache Spark can consume logs directly as Avro files, which let you process them at a low cost, with
+high bandwidth, and without having to write a custom application.
+
+## Examples
+
+### Get all events in the Change Feed
+```C# Snippet:SampleSnippetsChangeFeed_GetAllEvents
+// Get all the events in the change feed.
+List changeFeedEvents = new List();
+await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync())
+{
+ changeFeedEvents.Add(changeFeedEvent);
+}
+```
+
+### Get events between a start and end time
+```C# Snippet:SampleSnippetsChangeFeed_GetEventsBetweenStartAndEndTime
+// Create the start and end time. The change feed client will round start time down to
+// the nearest hour, and round endTime up to the next hour if you provide DateTimeOffsets
+// with minutes and seconds.
+DateTimeOffset startTime = new DateTimeOffset(2017, 3, 2, 15, 0, 0, TimeSpan.Zero);
+DateTimeOffset endTime = new DateTimeOffset(2020, 10, 7, 2, 0, 0, TimeSpan.Zero);
+
+// You can also provide just a start or end time.
+await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync(
+ start: startTime,
+ end: endTime))
+{
+ changeFeedEvents.Add(changeFeedEvent);
+}
+```
+
+### Resume with cursor
+```C# Snippet:SampleSnippetsChangeFeed_ResumeWithCursor
+IAsyncEnumerator> enumerator = changeFeedClient
+ .GetChangesAsync()
+ .AsPages(pageSizeHint: 10)
+ .GetAsyncEnumerator();
+
+await enumerator.MoveNextAsync();
+
+foreach (BlobChangeFeedEvent changeFeedEvent in enumerator.Current.Values)
+{
+ changeFeedEvents.Add(changeFeedEvent);
+}
+
+// get the change feed cursor. The cursor is not required to get each page of events,
+// it is intended to be saved and used to resume iterating at a later date.
+string cursor = enumerator.Current.ContinuationToken;
+
+// Resume iterating from the previous position with the cursor.
+await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync(
+ continuation: cursor))
+{
+ changeFeedEvents.Add(changeFeedEvent);
+}
+```
+
+## Troubleshooting
+All Blob service operations will throw a
+[RequestFailedException][RequestFailedException] on failure with
+helpful [`ErrorCode`s][error_codes]. Many of these errors are recoverable.
+
+## Next steps
+
+Get started with our [Change Feed samples][samples]:
+
+1. [Hello World](samples/Sample01a_HelloWorld.cs): Get changes that have occurred in your storage account (or [asynchronously](samples/Sample01b_HelloWorldAsync.cs))
+2. [Auth](samples/Sample02_Auth.cs): Authenticate with connection strings, public access, shared keys, shared access signatures, and Azure Active Directory.
+
+
+## Contributing
+
+See the [Storage CONTRIBUTING.md][storage_contrib] for details on building,
+testing, and contributing to this library.
+
+This project welcomes contributions and suggestions. Most contributions require
+you to agree to a Contributor License Agreement (CLA) declaring that you have
+the right to, and actually do, grant us the rights to use your contribution. For
+details, visit [cla.microsoft.com][cla].
+
+This project has adopted the [Microsoft Open Source Code of Conduct][coc].
+For more information see the [Code of Conduct FAQ][coc_faq]
+or contact [opencode@microsoft.com][coc_contact] with any
+additional questions or comments.
+
+
+[source]: https://github.com/Azure/azure-sdk-for-net/tree/master/sdk/storage/Azure.Storage.Blobs/src
+[product_docs]: https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-change-feed
+[azure_sub]: https://azure.microsoft.com/free/
+[storage_account_docs]: https://docs.microsoft.com/azure/storage/common/storage-account-overview
+[storage_account_create_ps]: https://docs.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-powershell
+[storage_account_create_cli]: https://docs.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-cli
+[storage_account_create_portal]: https://docs.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-portal
+[authenticating_with_blobs]: https://github.com/Azure/azure-sdk-for-net/blob/master/sdk/storage/Azure.Storage.Blobs/samples/Sample02_Auth.cs
+[RequestFailedException]: https://github.com/Azure/azure-sdk-for-net/tree/master/sdk/core/Azure.Core/src/RequestFailedException.cs
+[error_codes]: https://docs.microsoft.com/rest/api/storageservices/blob-service-error-codes
+[samples]: samples/
+[storage_contrib]: ../CONTRIBUTING.md
+[cla]: https://cla.microsoft.com
+[coc]: https://opensource.microsoft.com/codeofconduct/
+[coc_faq]: https://opensource.microsoft.com/codeofconduct/faq/
+[coc_contact]: mailto:opencode@microsoft.com
\ No newline at end of file
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Azure.Storage.Blobs.ChangeFeed.Samples.Tests.csproj b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Azure.Storage.Blobs.ChangeFeed.Samples.Tests.csproj
new file mode 100644
index 0000000000000..2f30c91b605f9
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Azure.Storage.Blobs.ChangeFeed.Samples.Tests.csproj
@@ -0,0 +1,31 @@
+
+
+ $(RequiredTargetFrameworks)
+ Microsoft Azure.Storage.Blobs.ChangeFeed client library samples
+ false
+
+
+
+
+
+
+
+
+
+ PreserveNewest
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/README.md b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/README.md
new file mode 100644
index 0000000000000..e4847a5497efa
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/README.md
@@ -0,0 +1,15 @@
+---
+page_type: sample
+languages:
+- csharp
+products:
+- azure
+- azure-storage
+name: Azure.Storage.ChangeFeed samples for .NET
+description: Samples for the Azure.Storage.Blobs.ChangeFeed client library
+---
+
+# Azure.Storage.ChangeFeed Samples
+
+- sample 0
+- sample 1
\ No newline at end of file
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs
new file mode 100644
index 0000000000000..fcf9da2a85f30
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs
@@ -0,0 +1,115 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Reflection.Metadata.Ecma335;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+using NUnit.Framework;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Samples
+{
+ ///
+ /// Basic Azure ChangeFeed Storage samples.
+ ///
+ public class Sample01a_HelloWorld : SampleTest
+ {
+ ///
+ /// Download every event in the change feed.
+ ///
+ [Test]
+ public void ChangeFeed()
+ {
+ // Get a connection string to our Azure Storage account.
+ string connectionString = ConnectionString;
+
+ // Get a new blob service client.
+ BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString);
+
+ // Get a new change feed client.
+ BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient();
+
+ // Get all the events in the change feed.
+ List changeFeedEvents = new List();
+ foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChanges())
+ {
+ changeFeedEvents.Add(changeFeedEvent);
+ }
+ }
+
+ ///
+ /// Download change feed events between a start and end time.
+ ///
+ [Test]
+ public void ChangeFeedBetweenDates()
+ {
+ // Get a connection string to our Azure Storage account.
+ string connectionString = ConnectionString;
+
+ // Get a new blob service client.
+ BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString);
+
+ // Get a new change feed client.
+ BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient();
+ List changeFeedEvents = new List();
+
+ // Create the start and end time. The change feed client will round start time down to
+ // the nearest hour, and round endTime up to the next hour if you provide DateTimeOffsets
+ // with minutes and seconds.
+ DateTimeOffset startTime = new DateTimeOffset(2017, 3, 2, 15, 0, 0, TimeSpan.Zero);
+ DateTimeOffset endTime = new DateTimeOffset(2020, 10, 7, 2, 0, 0, TimeSpan.Zero);
+
+ // You can also provide just a start or end time.
+ foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChanges(
+ start: startTime,
+ end: endTime))
+ {
+ changeFeedEvents.Add(changeFeedEvent);
+ }
+ }
+
+ ///
+ /// You can use the change feed cursor to resume iterating through the change feed
+ /// at a later time.
+ ///
+ [Test]
+ public void ChangeFeedResumeWithCursor()
+ {
+ // Get a connection string to our Azure Storage account.
+ string connectionString = ConnectionString;
+
+ // Get a new blob service client.
+ BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString);
+
+ // Get a new change feed client.
+ BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient();
+ List changeFeedEvents = new List();
+
+ IEnumerator> enumerator = changeFeedClient
+ .GetChanges()
+ .AsPages(pageSizeHint: 10)
+ .GetEnumerator();
+ ;
+
+ enumerator.MoveNext();
+
+ foreach (BlobChangeFeedEvent changeFeedEvent in enumerator.Current.Values)
+ {
+ changeFeedEvents.Add(changeFeedEvent);
+ }
+
+ // get the change feed cursor. The cursor is not required to get each page of events,
+ // it is intended to be saved and used to resume iterating at a later date.
+ string cursor = enumerator.Current.ContinuationToken;
+
+ // Resume iterating from the previous position with the cursor.
+ foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChanges(
+ continuation: cursor))
+ {
+ changeFeedEvents.Add(changeFeedEvent);
+ }
+ }
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs
new file mode 100644
index 0000000000000..3dc4f738969ac
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs
@@ -0,0 +1,121 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Threading.Tasks;
+using Azure.Storage;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+using NUnit.Framework;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Samples
+{
+ ///
+ /// Basic Azure ChangeFeed Storage samples.
+ ///
+ public class Sample01b_HelloWorldAsync : SampleTest
+ {
+ ///
+ /// Download every event in the change feed.
+ ///
+ [Test]
+ public async Task ChangeFeedAsync()
+ {
+ // Get a connection string to our Azure Storage account.
+ string connectionString = ConnectionString;
+
+ // Get a new blob service client.
+ BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString);
+
+ // Get a new change feed client.
+ BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient();
+
+ #region Snippet:SampleSnippetsChangeFeed_GetAllEvents
+ // Get all the events in the change feed.
+ List changeFeedEvents = new List();
+ await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync())
+ {
+ changeFeedEvents.Add(changeFeedEvent);
+ }
+ #endregion
+ }
+
+ ///
+ /// Download change feed events between a start and end time.
+ ///
+ [Test]
+ public async Task ChangeFeedBetweenDatesAsync()
+ {
+ // Get a connection string to our Azure Storage account.
+ string connectionString = ConnectionString;
+
+ // Get a new blob service client.
+ BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString);
+
+ // Get a new change feed client.
+ BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient();
+ List changeFeedEvents = new List();
+
+ #region Snippet:SampleSnippetsChangeFeed_GetEventsBetweenStartAndEndTime
+ // Create the start and end time. The change feed client will round start time down to
+ // the nearest hour, and round endTime up to the next hour if you provide DateTimeOffsets
+ // with minutes and seconds.
+ DateTimeOffset startTime = new DateTimeOffset(2017, 3, 2, 15, 0, 0, TimeSpan.Zero);
+ DateTimeOffset endTime = new DateTimeOffset(2020, 10, 7, 2, 0, 0, TimeSpan.Zero);
+
+ // You can also provide just a start or end time.
+ await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync(
+ start: startTime,
+ end: endTime))
+ {
+ changeFeedEvents.Add(changeFeedEvent);
+ }
+ #endregion
+ }
+
+ ///
+ /// You can use the change feed cursor to resume iterating through the change feed
+ /// at a later time.
+ ///
+ [Test]
+ public async Task ChangeFeedResumeWithCursorAsync()
+ {
+ // Get a connection string to our Azure Storage account.
+ string connectionString = ConnectionString;
+
+ // Get a new blob service client.
+ BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString);
+
+ // Get a new change feed client.
+ BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient();
+ List changeFeedEvents = new List();
+
+ #region Snippet:SampleSnippetsChangeFeed_ResumeWithCursor
+ IAsyncEnumerator> enumerator = changeFeedClient
+ .GetChangesAsync()
+ .AsPages(pageSizeHint: 10)
+ .GetAsyncEnumerator();
+
+ await enumerator.MoveNextAsync();
+
+ foreach (BlobChangeFeedEvent changeFeedEvent in enumerator.Current.Values)
+ {
+ changeFeedEvents.Add(changeFeedEvent);
+ }
+
+ // get the change feed cursor. The cursor is not required to get each page of events,
+ // it is intended to be saved and used to resume iterating at a later date.
+ string cursor = enumerator.Current.ContinuationToken;
+
+ // Resume iterating from the previous position with the cursor.
+ await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync(
+ continuation: cursor))
+ {
+ changeFeedEvents.Add(changeFeedEvent);
+ }
+ #endregion
+ }
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AssemblyInfo.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AssemblyInfo.cs
new file mode 100644
index 0000000000000..0c57a2e78eba5
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AssemblyInfo.cs
@@ -0,0 +1,13 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System.Runtime.CompilerServices;
+
+[assembly: InternalsVisibleTo("Azure.Storage.Blobs.ChangeFeed.Tests, PublicKey=" +
+ "0024000004800000940000000602000000240000525341310004000001000100d15ddcb2968829" +
+ "5338af4b7686603fe614abd555e09efba8fb88ee09e1f7b1ccaeed2e8f823fa9eef3fdd60217fc" +
+ "012ea67d2479751a0b8c087a4185541b851bd8b16f8d91b840e51b1cb0ba6fe647997e57429265" +
+ "e85ef62d565db50a69ae1647d54d7bd855e4db3d8a91510e5bcbd0edfbbecaa20a7bd9ae74593d" +
+ "aa7b11b4")]
+[assembly: InternalsVisibleTo("DynamicProxyGenAssembly2, PublicKey=0024000004800000940000000602000000240000525341310004000001000100c547cac37abd99c8db225ef2f6c8a3602f3b3606cc9891605d02baa56104f4cfc0734aa39b93bf7852f7d9266654753cc297e7d2edfe0bac1cdcf9f717241550e0a7b191195b7667bb4f64bcb8e2121380fd1d9d46ad2d92d2d15605093924cceaf74c4861eff62abf69b9291ed0a340e113be11e6a7d3113e92484cf7045cc7")]
+[assembly: Azure.Core.AzureResourceProviderNamespace("Microsoft.Storage")]
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AvroReaderFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AvroReaderFactory.cs
new file mode 100644
index 0000000000000..13ba1f019e595
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AvroReaderFactory.cs
@@ -0,0 +1,29 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System.IO;
+using Azure.Storage.Internal.Avro;
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+ /// <summary>
+ /// Creates AvroReaders. Allows us to inject mock AvroReaders in
+ /// the Chunk unit tests.
+ /// </summary>
+ internal class AvroReaderFactory
+ {
+ public virtual AvroReader BuildAvroReader(Stream dataStream)
+ => new AvroReader(dataStream);
+
+ public virtual AvroReader BuildAvroReader(
+ Stream dataStream,
+ Stream headStream,
+ long blockOffset,
+ long eventIndex)
+ => new AvroReader(
+ dataStream,
+ headStream,
+ blockOffset,
+ eventIndex);
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Azure.Storage.Blobs.ChangeFeed.csproj b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Azure.Storage.Blobs.ChangeFeed.csproj
new file mode 100644
index 0000000000000..d1d9b80dcdce9
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Azure.Storage.Blobs.ChangeFeed.csproj
@@ -0,0 +1,48 @@
+
+
+ $(RequiredTargetFrameworks)
+
+
+ Microsoft Azure.Storage.Blobs.ChangeFeed client library
+ 12.0.0-preview.1
+ ChangeFeedSDK;$(DefineConstants)
+ Microsoft Azure Change Feed;Microsoft;Azure;Storage;StorageScalable;$(PackageCommonTags)
+
+ This client library enables working with the Microsoft Azure Storage Change Feed feature to review and monitor changes to an Azure Storage account.
+ For this release see notes - https://github.com/Azure/azure-sdk-for-net/blob/master/sdk/storage/Azure.Storage.ChangeFeed/README.md and https://github.com/Azure/azure-sdk-for-net/blob/master/sdk/storage/Azure.Storage.ChangeFeed/CHANGELOG.md
+ in addition to the breaking changes https://github.com/Azure/azure-sdk-for-net/blob/master/sdk/storage/Azure.Storage.ChangeFeed/BreakingChanges.txt
+ Microsoft Azure Storage quickstarts and tutorials - TODO
+ Microsoft Azure Storage REST API Reference - TODO
+
+ false
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs
new file mode 100644
index 0000000000000..dce79b08a94be
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs
@@ -0,0 +1,89 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.Storage.Blobs;
+using Azure.Storage.Blobs.Models;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+ /// <summary>
+ /// BlobChangeFeedAsyncPagable.
+ /// </summary>
+ public class BlobChangeFeedAsyncPagable : AsyncPageable<BlobChangeFeedEvent>
+ {
+ private readonly ChangeFeedFactory _changeFeedFactory;
+ private readonly BlobServiceClient _blobServiceClient;
+ private readonly DateTimeOffset? _startTime;
+ private readonly DateTimeOffset? _endTime;
+ private readonly string _continuation;
+
+ /// <summary>
+ /// Internal constructor.
+ /// </summary>
+ internal BlobChangeFeedAsyncPagable(
+ BlobServiceClient blobServiceClient,
+ DateTimeOffset? startTime = default,
+ DateTimeOffset? endTime = default)
+ {
+ _changeFeedFactory = new ChangeFeedFactory(blobServiceClient);
+ _blobServiceClient = blobServiceClient;
+ _startTime = startTime;
+ _endTime = endTime;
+ }
+
+ internal BlobChangeFeedAsyncPagable(
+ BlobServiceClient blobServiceClient,
+ string continuation)
+ {
+ _changeFeedFactory = new ChangeFeedFactory(blobServiceClient);
+ _blobServiceClient = blobServiceClient;
+ _continuation = continuation;
+ }
+
+ /// <summary>
+ /// Returns <see cref="BlobChangeFeedEvent"/>s as Pages.
+ /// </summary>
+ /// <param name="continuationToken">
+ /// Throws an <see cref="ArgumentException"/>. To use continuation, call
+ /// <see cref="BlobChangeFeedClient.GetChangesAsync(string)"/>.
+ /// </param>
+ /// <param name="pageSizeHint">
+ /// Page size.
+ /// </param>
+ /// <returns>
+ /// <see cref="IAsyncEnumerable{T}"/> of <see cref="Page{BlobChangeFeedEvent}"/>.
+ /// </returns>
+ public override async IAsyncEnumerable<Page<BlobChangeFeedEvent>> AsPages(
+ string continuationToken = null,
+ int? pageSizeHint = null)
+ {
+ if (continuationToken != null)
+ {
+ throw new ArgumentException("Continuation not supported. Use BlobChangeFeedClient.GetChangesAsync(string) instead");
+ }
+
+ ChangeFeed changeFeed = await _changeFeedFactory.BuildChangeFeed(
+ async: true,
+ _startTime,
+ _endTime,
+ _continuation)
+ .ConfigureAwait(false);
+
+ while (changeFeed.HasNext())
+ {
+ yield return await changeFeed.GetPage(
+ async: true,
+ pageSize: pageSizeHint ?? 512).ConfigureAwait(false);
+ }
+ }
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedClient.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedClient.cs
new file mode 100644
index 0000000000000..d469d28534f51
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedClient.cs
@@ -0,0 +1,219 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.Threading.Tasks;
+using Azure.Core;
+using Azure.Storage.Blobs;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+ /// <summary>
+ /// BlobChangeFeedClient.
+ /// </summary>
+ public class BlobChangeFeedClient
+ {
+ private BlobServiceClient _blobServiceClient;
+
+ /// <summary>
+ /// Constructor.
+ /// </summary>
+ protected BlobChangeFeedClient() { }
+
+ internal BlobChangeFeedClient(BlobServiceClient blobServiceClient)
+ {
+ _blobServiceClient = blobServiceClient;
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="BlobChangeFeedClient"/>
+ /// class.
+ /// </summary>
+ /// <param name="connectionString">
+ /// A connection string includes the authentication information
+ /// required for your application to access data in an Azure Storage
+ /// account at runtime.
+ ///
+ /// For more information, <see href="https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string">Configure Azure Storage connection strings</see>.
+ /// </param>
+ public BlobChangeFeedClient(string connectionString)
+ {
+ _blobServiceClient = new BlobServiceClient(connectionString);
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="BlobChangeFeedClient"/>
+ /// class.
+ /// </summary>
+ /// <param name="connectionString">
+ /// A connection string includes the authentication information
+ /// required for your application to access data in an Azure Storage
+ /// account at runtime.
+ ///
+ /// For more information, <see href="https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string">Configure Azure Storage connection strings</see>.
+ /// </param>
+ /// <param name="options">
+ /// Optional client options that define the transport pipeline
+ /// policies for authentication, retries, etc., that are applied to
+ /// every request.
+ /// </param>
+ public BlobChangeFeedClient(string connectionString, BlobClientOptions options)
+
+ {
+ _blobServiceClient = new BlobServiceClient(connectionString, options);
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="BlobChangeFeedClient"/>
+ /// class.
+ /// </summary>
+ /// <param name="serviceUri">
+ /// A <see cref="Uri"/> referencing the blob service.
+ /// This is likely to be similar to "https://{account_name}.blob.core.windows.net".
+ /// </param>
+ /// <param name="options">
+ /// Optional client options that define the transport pipeline
+ /// policies for authentication, retries, etc., that are applied to
+ /// every request.
+ /// </param>
+ public BlobChangeFeedClient(Uri serviceUri, BlobClientOptions options = default)
+ {
+ _blobServiceClient = new BlobServiceClient(serviceUri, options);
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="BlobChangeFeedClient"/>
+ /// class.
+ /// </summary>
+ /// <param name="serviceUri">
+ /// A <see cref="Uri"/> referencing the blob service.
+ /// This is likely to be similar to "https://{account_name}.blob.core.windows.net".
+ /// </param>
+ /// <param name="credential">
+ /// The shared key credential used to sign requests.
+ /// </param>
+ /// <param name="options">
+ /// Optional client options that define the transport pipeline
+ /// policies for authentication, retries, etc., that are applied to
+ /// every request.
+ /// </param>
+ public BlobChangeFeedClient(Uri serviceUri, StorageSharedKeyCredential credential, BlobClientOptions options = default)
+ {
+ _blobServiceClient = new BlobServiceClient(serviceUri, credential, options);
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="BlobChangeFeedClient"/>
+ /// class.
+ /// </summary>
+ /// <param name="serviceUri">
+ /// A <see cref="Uri"/> referencing the blob service.
+ /// This is likely to be similar to "https://{account_name}.blob.core.windows.net".
+ /// </param>
+ /// <param name="credential">
+ /// The token credential used to sign requests.
+ /// </param>
+ /// <param name="options">
+ /// Optional client options that define the transport pipeline
+ /// policies for authentication, retries, etc., that are applied to
+ /// every request.
+ /// </param>
+ public BlobChangeFeedClient(Uri serviceUri, TokenCredential credential, BlobClientOptions options = default)
+ {
+ _blobServiceClient = new BlobServiceClient(serviceUri, credential, options);
+ }
+
+ /// <summary>
+ /// GetChanges.
+ /// </summary>
+ /// <returns><see cref="BlobChangeFeedPagable"/>.</returns>
+#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken.
+ public virtual BlobChangeFeedPagable GetChanges()
+#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken.
+ {
+ BlobChangeFeedPagable pageable = new BlobChangeFeedPagable(
+ _blobServiceClient);
+ return pageable;
+ }
+
+ /// <summary>
+ /// GetChanges.
+ /// </summary>
+ /// <param name="continuation">Cursor to resume from.</param>
+ /// <returns><see cref="BlobChangeFeedPagable"/>.</returns>
+#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken.
+ public virtual BlobChangeFeedPagable GetChanges(string continuation)
+#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken.
+ {
+ BlobChangeFeedPagable pageable = new BlobChangeFeedPagable(
+ _blobServiceClient,
+ continuation);
+ return pageable;
+ }
+
+ /// <summary>
+ /// GetChanges.
+ /// </summary>
+ /// <param name="start">Start of the time range.</param>
+ /// <param name="end">End of the time range.</param>
+ /// <returns><see cref="BlobChangeFeedPagable"/>.</returns>
+#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken.
+ public virtual BlobChangeFeedPagable GetChanges(DateTimeOffset start = default, DateTimeOffset end = default)
+#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken.
+ {
+ BlobChangeFeedPagable pageable = new BlobChangeFeedPagable(
+ _blobServiceClient,
+ start,
+ end);
+ return pageable;
+ }
+
+ /// <summary>
+ /// GetChangesAsync.
+ /// </summary>
+ /// <returns><see cref="BlobChangeFeedAsyncPagable"/>.</returns>
+#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken.
+ public virtual BlobChangeFeedAsyncPagable GetChangesAsync()
+#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken.
+ {
+ BlobChangeFeedAsyncPagable asyncPagable = new BlobChangeFeedAsyncPagable(_blobServiceClient);
+ return asyncPagable;
+ }
+
+ /// <summary>
+ /// GetChangesAsync.
+ /// </summary>
+ /// <param name="continuation">Cursor to resume from.</param>
+ /// <returns><see cref="BlobChangeFeedAsyncPagable"/>.</returns>
+#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken.
+ public virtual BlobChangeFeedAsyncPagable GetChangesAsync(string continuation)
+#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken.
+ {
+ BlobChangeFeedAsyncPagable asyncPagable = new BlobChangeFeedAsyncPagable(_blobServiceClient,
+ continuation);
+ return asyncPagable;
+ }
+
+ /// <summary>
+ /// GetChangesAsync.
+ /// </summary>
+ /// <param name="start">Start of the time range.</param>
+ /// <param name="end">End of the time range.</param>
+ /// <returns><see cref="BlobChangeFeedAsyncPagable"/>.</returns>
+#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken.
+ public virtual BlobChangeFeedAsyncPagable GetChangesAsync(
+ DateTimeOffset start = default,
+ DateTimeOffset end = default)
+#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken.
+ {
+ BlobChangeFeedAsyncPagable asyncPagable = new BlobChangeFeedAsyncPagable(
+ _blobServiceClient,
+ start,
+ end);
+ return asyncPagable;
+ }
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs
new file mode 100644
index 0000000000000..db2823badacca
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs
@@ -0,0 +1,168 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.Threading.Tasks;
+using Azure.Storage.Blobs;
+using Azure.Storage.Blobs.Models;
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+ /// <summary>
+ /// BlobChangeFeedExtensions.
+ /// </summary>
+ public static class BlobChangeFeedExtensions
+ {
+ /// <summary>
+ /// GetChangeFeedClient.
+ /// </summary>
+ /// <param name="serviceClient"></param>
+ /// <returns><see cref="BlobChangeFeedClient"/>.</returns>
+ public static BlobChangeFeedClient GetChangeFeedClient(this BlobServiceClient serviceClient)
+ {
+ return new BlobChangeFeedClient(serviceClient);
+ }
+
+ /// <summary>
+ /// Builds a DateTimeOffset from a segment path.
+ /// </summary>
+ internal static DateTimeOffset? ToDateTimeOffset(this string segmentPath)
+ {
+ if (segmentPath == null)
+ {
+ return default;
+ }
+ string[] splitPath = segmentPath.Split(new char[] { '/' }, StringSplitOptions.RemoveEmptyEntries);
+
+ if (splitPath.Length < 3)
+ {
+ throw new ArgumentException($"{nameof(segmentPath)} is not a valid segment path.");
+ }
+
+ return new DateTimeOffset(
+ year: int.Parse(splitPath[2], CultureInfo.InvariantCulture),
+ month: splitPath.Length >= 4
+ ? int.Parse(splitPath[3], CultureInfo.InvariantCulture)
+ : 1,
+ day: splitPath.Length >= 5
+ ? int.Parse(splitPath[4], CultureInfo.InvariantCulture)
+ : 1,
+ hour: splitPath.Length >= 6
+ ? int.Parse(splitPath[5], CultureInfo.InvariantCulture) / 100
+ : 0,
+ minute: 0,
+ second: 0,
+ offset: TimeSpan.Zero);
+ }
+
+ /// <summary>
+ /// Rounds a DateTimeOffset down to the nearest hour.
+ /// </summary>
+ internal static DateTimeOffset? RoundDownToNearestHour(this DateTimeOffset? dateTimeOffset)
+ {
+ if (dateTimeOffset == null)
+ {
+ return null;
+ }
+
+ return new DateTimeOffset(
+ year: dateTimeOffset.Value.Year,
+ month: dateTimeOffset.Value.Month,
+ day: dateTimeOffset.Value.Day,
+ hour: dateTimeOffset.Value.Hour,
+ minute: 0,
+ second: 0,
+ offset: dateTimeOffset.Value.Offset);
+ }
+
+ /// <summary>
+ /// Rounds a DateTimeOffset up to the nearest hour.
+ /// </summary>
+ internal static DateTimeOffset? RoundUpToNearestHour(this DateTimeOffset? dateTimeOffset)
+ {
+ if (dateTimeOffset == null)
+ {
+ return null;
+ }
+
+ DateTimeOffset? newDateTimeOffset = dateTimeOffset.RoundDownToNearestHour();
+
+ return newDateTimeOffset.Value.AddHours(1);
+ }
+
+ internal static DateTimeOffset? RoundDownToNearestYear(this DateTimeOffset? dateTimeOffset)
+ {
+ if (dateTimeOffset == null)
+ {
+ return null;
+ }
+
+ return new DateTimeOffset(
+ year: dateTimeOffset.Value.Year,
+ month: 1,
+ day: 1,
+ hour: 0,
+ minute: 0,
+ second: 0,
+ offset: TimeSpan.Zero);
+ }
+
+ internal static async Task<Queue<string>> GetSegmentsInYear(
+ bool async,
+ BlobContainerClient containerClient,
+ string yearPath,
+ DateTimeOffset? startTime = default,
+ DateTimeOffset? endTime = default)
+ {
+ List<string> list = new List<string>();
+
+ if (async)
+ {
+ await foreach (BlobHierarchyItem blobHierarchyItem in containerClient.GetBlobsByHierarchyAsync(
+ prefix: yearPath)
+ .ConfigureAwait(false))
+ {
+ if (blobHierarchyItem.IsPrefix)
+ continue;
+
+ DateTimeOffset segmentDateTime = blobHierarchyItem.Blob.Name.ToDateTimeOffset().Value;
+ if (startTime.HasValue && segmentDateTime < startTime
+ || endTime.HasValue && segmentDateTime > endTime)
+ continue;
+
+ list.Add(blobHierarchyItem.Blob.Name);
+ }
+ }
+ else
+ {
+ foreach (BlobHierarchyItem blobHierarchyItem in containerClient.GetBlobsByHierarchy(
+ prefix: yearPath))
+ {
+ if (blobHierarchyItem.IsPrefix)
+ continue;
+
+ DateTimeOffset segmentDateTime = blobHierarchyItem.Blob.Name.ToDateTimeOffset().Value;
+ if (startTime.HasValue && segmentDateTime < startTime
+ || endTime.HasValue && segmentDateTime > endTime)
+ continue;
+
+ list.Add(blobHierarchyItem.Blob.Name);
+ }
+ }
+
+ return new Queue<string>(list);
+ }
+
+ internal static DateTimeOffset MinDateTime(DateTimeOffset lastConsumable, DateTimeOffset? endDate)
+ {
+ if (endDate.HasValue && endDate.Value < lastConsumable)
+ {
+ return endDate.Value;
+ }
+
+ return lastConsumable;
+ }
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs
new file mode 100644
index 0000000000000..cde36551fdfa6
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs
@@ -0,0 +1,80 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.Threading;
+using Azure.Core.Pipeline;
+using Azure.Storage.Blobs;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+ /// <summary>
+ /// BlobChangeFeedPagable.
+ /// </summary>
+ public class BlobChangeFeedPagable : Pageable<BlobChangeFeedEvent>
+ {
+ private readonly ChangeFeedFactory _changeFeedFactory;
+ private readonly BlobServiceClient _blobServiceClient;
+ private readonly DateTimeOffset? _startTime;
+ private readonly DateTimeOffset? _endTime;
+ private readonly string _continuation;
+
+ internal BlobChangeFeedPagable(
+ BlobServiceClient blobServiceClient,
+ DateTimeOffset? startTime = default,
+ DateTimeOffset? endTime = default)
+ {
+ _changeFeedFactory = new ChangeFeedFactory(blobServiceClient);
+ _blobServiceClient = blobServiceClient;
+ _startTime = startTime;
+ _endTime = endTime;
+ }
+
+ internal BlobChangeFeedPagable(
+ BlobServiceClient blobServiceClient,
+ string continuation)
+ {
+ _changeFeedFactory = new ChangeFeedFactory(blobServiceClient);
+ _blobServiceClient = blobServiceClient;
+ _continuation = continuation;
+ }
+
+ /// <summary>
+ /// Returns <see cref="BlobChangeFeedEvent"/>s as Pages.
+ /// </summary>
+ /// <param name="continuationToken">
+ /// Throws an <see cref="ArgumentException"/>. To use continuation, call
+ /// <see cref="BlobChangeFeedClient.GetChanges(string)"/>.
+ /// </param>
+ /// <param name="pageSizeHint">
+ /// Page size.
+ /// </param>
+ /// <returns>
+ /// <see cref="IEnumerable{T}"/> of <see cref="Page{BlobChangeFeedEvent}"/>.
+ /// </returns>
+ public override IEnumerable<Page<BlobChangeFeedEvent>> AsPages(string continuationToken = null, int? pageSizeHint = null)
+ {
+ if (continuationToken != null)
+ {
+ throw new ArgumentException("Continuation not supported. Use BlobChangeFeedClient.GetChanges(string) instead");
+ }
+
+ ChangeFeed changeFeed = _changeFeedFactory.BuildChangeFeed(
+ async: false,
+ _startTime,
+ _endTime,
+ _continuation)
+ .EnsureCompleted();
+
+ while (changeFeed.HasNext())
+ {
+ yield return changeFeed.GetPage(
+ async: false,
+ pageSize: pageSizeHint ?? 512).EnsureCompleted();
+ }
+ }
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs
new file mode 100644
index 0000000000000..9cab135ffad16
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs
@@ -0,0 +1,256 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Text.Json;
+using System.Threading.Tasks;
+using Azure.Storage.Blobs.Models;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+using System.Threading;
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+ internal class ChangeFeed
+ {
+ /// <summary>
+ /// BlobContainerClient for making List Blob requests and creating Segments.
+ /// </summary>
+ private readonly BlobContainerClient _containerClient;
+
+ /// <summary>
+ /// A <see cref="SegmentFactory"/> for creating new <see cref="Segment"/>s.
+ /// </summary>
+ private readonly SegmentFactory _segmentFactory;
+
+ /// <summary>
+ /// Queue of paths to years we haven't processed yet.
+ /// </summary>
+ private readonly Queue<string> _years;
+
+ /// <summary>
+ /// Paths to segments in the current year we haven't processed yet.
+ /// </summary>
+ private Queue<string> _segments;
+
+ /// <summary>
+ /// The Segment we are currently processing.
+ /// </summary>
+ private Segment _currentSegment;
+
+ /// <summary>
+ /// The latest time the Change Feed can safely be read from.
+ /// </summary>
+ private DateTimeOffset _lastConsumable;
+
+ /// <summary>
+ /// User-specified start time. If the start time occurs before Change Feed was enabled
+ /// for this account, we will start at the beginning of the Change Feed.
+ /// </summary>
+ private DateTimeOffset? _startTime;
+
+ /// <summary>
+ /// User-specified end time. If the end time occurs after _lastConsumable, we will
+ /// end at _lastConsumable.
+ /// </summary>
+ private DateTimeOffset? _endTime;
+
+ /// <summary>
+ /// If this Change Feed has no events.
+ /// </summary>
+ private bool _empty;
+
+ public ChangeFeed(
+ BlobContainerClient containerClient,
+ SegmentFactory segmentFactory,
+ Queue<string> years,
+ Queue<string> segments,
+ Segment currentSegment,
+ DateTimeOffset lastConsumable,
+ DateTimeOffset? startTime,
+ DateTimeOffset? endTime)
+ {
+ _containerClient = containerClient;
+ _segmentFactory = segmentFactory;
+ _years = years;
+ _segments = segments;
+ _currentSegment = currentSegment;
+ _lastConsumable = lastConsumable;
+ _startTime = startTime;
+ _endTime = endTime;
+ _empty = false;
+ }
+
+ /// <summary>
+ /// Constructor for mocking, and for creating a Change Feed with no Events.
+ /// </summary>
+ public ChangeFeed() { }
+
+ // The last segment may still be adding chunks.
+ public async Task<Page<BlobChangeFeedEvent>> GetPage(
+ bool async,
+ int pageSize = Constants.ChangeFeed.DefaultPageSize,
+ CancellationToken cancellationToken = default)
+ {
+ if (!HasNext())
+ {
+ throw new InvalidOperationException("Change feed doesn't have any more events");
+ }
+
+ if (_currentSegment.DateTime >= _endTime)
+ {
+ return BlobChangeFeedEventPage.Empty();
+ }
+
+ if (!_currentSegment.Finalized)
+ {
+ return BlobChangeFeedEventPage.Empty();
+ }
+
+ if (pageSize > Constants.ChangeFeed.DefaultPageSize)
+ {
+ pageSize = Constants.ChangeFeed.DefaultPageSize;
+ }
+
+ // Get next page
+ List<BlobChangeFeedEvent> blobChangeFeedEvents = new List<BlobChangeFeedEvent>();
+
+ int remainingEvents = pageSize;
+ while (blobChangeFeedEvents.Count < pageSize
+ && HasNext())
+ {
+ List<BlobChangeFeedEvent> newEvents = await _currentSegment.GetPage(
+ async,
+ remainingEvents,
+ cancellationToken).ConfigureAwait(false);
+ blobChangeFeedEvents.AddRange(newEvents);
+ remainingEvents -= newEvents.Count;
+ await AdvanceSegmentIfNecessary(async).ConfigureAwait(false);
+ }
+
+ return new BlobChangeFeedEventPage(blobChangeFeedEvents, JsonSerializer.Serialize(GetCursor()));
+ }
+
+ public bool HasNext()
+ {
+ // [If Change Feed is empty], or [current segment is not finalized]
+ // or ([segment count is 0] and [year count is 0] and [current segment doesn't have next])
+ if (_empty
+ || !_currentSegment.Finalized
+ || _segments.Count == 0
+ && _years.Count == 0
+ && !_currentSegment.HasNext())
+ {
+ return false;
+ }
+
+ if (_endTime.HasValue)
+ {
+ return _currentSegment.DateTime < _endTime;
+ }
+
+ return true;
+ }
+
+ public DateTimeOffset LastConsumable()
+ {
+ return _lastConsumable;
+ }
+
+ internal ChangeFeedCursor GetCursor()
+ => new ChangeFeedCursor(
+ urlHash: _containerClient.Uri.ToString().GetHashCode(),
+ endDateTime: _endTime,
+ currentSegmentCursor: _currentSegment.GetCursor());
+
+ internal async Task<Queue<string>> GetSegmentsInYear(
+ bool async,
+ string yearPath,
+ DateTimeOffset? startTime = default,
+ DateTimeOffset? endTime = default)
+ {
+ List<string> list = new List<string>();
+
+ if (async)
+ {
+ await foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchyAsync(
+ prefix: yearPath)
+ .ConfigureAwait(false))
+ {
+ if (blobHierarchyItem.IsPrefix)
+ continue;
+
+ DateTimeOffset segmentDateTime = blobHierarchyItem.Blob.Name.ToDateTimeOffset().Value;
+ if (startTime.HasValue && segmentDateTime < startTime
+ || endTime.HasValue && segmentDateTime > endTime)
+ continue;
+
+ list.Add(blobHierarchyItem.Blob.Name);
+ }
+ }
+ else
+ {
+ foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchy(
+ prefix: yearPath))
+ {
+ if (blobHierarchyItem.IsPrefix)
+ continue;
+
+ DateTimeOffset segmentDateTime = blobHierarchyItem.Blob.Name.ToDateTimeOffset().Value;
+ if (startTime.HasValue && segmentDateTime < startTime
+ || endTime.HasValue && segmentDateTime > endTime)
+ continue;
+
+ list.Add(blobHierarchyItem.Blob.Name);
+ }
+ }
+
+ return new Queue<string>(list);
+ }
+
+ private async Task AdvanceSegmentIfNecessary(bool async)
+ {
+ // If the current segment has more Events, we don't need to do anything.
+ if (_currentSegment.HasNext())
+ {
+ return;
+ }
+
+ // If the current segment is completed, remove it
+ if (_segments.Count > 0)
+ {
+ _currentSegment = await _segmentFactory.BuildSegment(
+ async,
+ _segments.Dequeue()).ConfigureAwait(false);
+ }
+
+ // If _segments is empty, refill it
+ else if (_segments.Count == 0 && _years.Count > 0)
+ {
+ string yearPath = _years.Dequeue();
+
+ // Get Segments for the next year
+ _segments = await GetSegmentsInYear(
+ async: async,
+ yearPath: yearPath,
+ startTime: _startTime,
+ endTime: _endTime)
+ .ConfigureAwait(false);
+
+ if (_segments.Count > 0)
+ {
+ _currentSegment = await _segmentFactory.BuildSegment(
+ async,
+ _segments.Dequeue())
+ .ConfigureAwait(false);
+ }
+ }
+ }
+
+ public static ChangeFeed Empty()
+ => new ChangeFeed
+ {
+ _empty = true
+ };
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs
new file mode 100644
index 0000000000000..1b43a443a3b47
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs
@@ -0,0 +1,204 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.Text.Json;
+using System.Threading.Tasks;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+using Azure.Storage.Blobs.Models;
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+ internal class ChangeFeedFactory
+ {
+ private readonly SegmentFactory _segmentFactory;
+ private readonly BlobContainerClient _containerClient;
+
+ public ChangeFeedFactory(
+ BlobServiceClient blobServiceClient)
+ {
+ _containerClient = blobServiceClient.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName);
+ _segmentFactory = new SegmentFactory(
+ _containerClient,
+ new ShardFactory(
+ _containerClient,
+ new ChunkFactory(
+ _containerClient,
+ new LazyLoadingBlobStreamFactory(),
+ new AvroReaderFactory())));
+ }
+
+ public ChangeFeedFactory(
+ BlobContainerClient containerClient,
+ SegmentFactory segmentFactory)
+ {
+ _containerClient = containerClient;
+ _segmentFactory = segmentFactory;
+ }
+
+ public async Task<ChangeFeed> BuildChangeFeed(
+ bool async,
+ DateTimeOffset? startTime = default,
+ DateTimeOffset? endTime = default,
+ string continuation = default)
+ {
+ DateTimeOffset lastConsumable;
+ Queue<string> years = new Queue<string>();
+ Queue<string> segments = new Queue<string>();
+ ChangeFeedCursor cursor = null;
+
+ // Create cursor. NOTE(review): cursor validation uses string.GetHashCode, which is randomized per process on .NET Core — confirm cursors are not expected to be valid across process restarts.
+ if (continuation != null)
+ {
+ cursor = JsonSerializer.Deserialize<ChangeFeedCursor>(continuation);
+ ValidateCursor(_containerClient, cursor);
+ startTime = cursor.CurrentSegmentCursor.SegmentTime;
+ endTime = cursor.EndTime;
+ }
+ // Round start and end time if we are not using the cursor.
+ else
+ {
+ startTime = startTime.RoundDownToNearestHour();
+ endTime = endTime.RoundUpToNearestHour();
+ }
+
+ // Check if Change Feed has been enabled for this account.
+ bool changeFeedContainerExists;
+
+ if (async)
+ {
+ changeFeedContainerExists = await _containerClient.ExistsAsync().ConfigureAwait(false);
+ }
+ else
+ {
+ changeFeedContainerExists = _containerClient.Exists();
+ }
+
+ if (!changeFeedContainerExists)
+ {
+ throw new ArgumentException("Change Feed hasn't been enabled on this account, or is currently being enabled.");
+ }
+
+ // Get last consumable
+ BlobClient blobClient = _containerClient.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath);
+ BlobDownloadInfo blobDownloadInfo;
+ if (async)
+ {
+ blobDownloadInfo = await blobClient.DownloadAsync().ConfigureAwait(false);
+ }
+ else
+ {
+ blobDownloadInfo = blobClient.Download();
+ }
+
+ JsonDocument jsonMetaSegment;
+ if (async)
+ {
+ jsonMetaSegment = await JsonDocument.ParseAsync(blobDownloadInfo.Content).ConfigureAwait(false);
+ }
+ else
+ {
+ jsonMetaSegment = JsonDocument.Parse(blobDownloadInfo.Content);
+ }
+
+ lastConsumable = jsonMetaSegment.RootElement.GetProperty("lastConsumable").GetDateTimeOffset();
+
+ // Get year paths
+ years = await GetYearPaths(async).ConfigureAwait(false);
+
+ // Dequeue any years that occur before start time
+ if (startTime.HasValue)
+ {
+ while (years.Count > 0
+ && years.Peek().ToDateTimeOffset() < startTime.RoundDownToNearestYear())
+ {
+ years.Dequeue();
+ }
+ }
+
+ // There are no years.
+ if (years.Count == 0)
+ {
+ return ChangeFeed.Empty();
+ }
+
+ while (segments.Count == 0 && years.Count > 0)
+ {
+ // Get Segments for year
+ segments = await BlobChangeFeedExtensions.GetSegmentsInYear(
+ async: async,
+ containerClient: _containerClient,
+ yearPath: years.Dequeue(),
+ startTime: startTime,
+ endTime: BlobChangeFeedExtensions.MinDateTime(lastConsumable, endTime))
+ .ConfigureAwait(false);
+ }
+
+ // We were on the last year, and there were no more segments.
+ if (segments.Count == 0)
+ {
+ return ChangeFeed.Empty();
+ }
+
+ Segment currentSegment = await _segmentFactory.BuildSegment(
+ async,
+ segments.Dequeue(),
+ cursor?.CurrentSegmentCursor)
+ .ConfigureAwait(false);
+
+ return new ChangeFeed(
+ _containerClient,
+ _segmentFactory,
+ years,
+ segments,
+ currentSegment,
+ lastConsumable,
+ startTime,
+ endTime);
+ }
+
+ private static void ValidateCursor(
+ BlobContainerClient containerClient,
+ ChangeFeedCursor cursor)
+ {
+ if (containerClient.Uri.ToString().GetHashCode() != cursor.UrlHash)
+ {
+ throw new ArgumentException("Cursor URL does not match container URL");
+ }
+ }
+
+ internal async Task<Queue<string>> GetYearPaths(
+ bool async)
+ {
+ List<string> list = new List<string>();
+
+ if (async)
+ {
+ await foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchyAsync(
+ prefix: Constants.ChangeFeed.SegmentPrefix,
+ delimiter: "/").ConfigureAwait(false))
+ {
+ if (blobHierarchyItem.Prefix.Contains(Constants.ChangeFeed.InitalizationSegment))
+ continue;
+
+ list.Add(blobHierarchyItem.Prefix);
+ }
+ }
+ else
+ {
+ foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchy(
+ prefix: Constants.ChangeFeed.SegmentPrefix,
+ delimiter: "/"))
+ {
+ if (blobHierarchyItem.Prefix.Contains(Constants.ChangeFeed.InitalizationSegment))
+ continue;
+
+ list.Add(blobHierarchyItem.Prefix);
+ }
+ }
+ return new Queue<string>(list);
+ }
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs
new file mode 100644
index 0000000000000..37262bf3cc5fd
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs
@@ -0,0 +1,69 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+using Azure.Storage.Internal.Avro;
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+ ///
+ /// Chunk.
+ ///
+ internal class Chunk
+ {
+ ///
+ /// Avro Reader to parser the Events.
+ ///
+ private readonly AvroReader _avroReader;
+
+ ///
+ /// The byte offset of the beginning of the current
+ /// Block.
+ ///
+ public virtual long BlockOffset { get; private set; }
+
+ ///
+ /// The index of the Event within the current block.
+ ///
+ public virtual long EventIndex { get; private set; }
+
+ public Chunk(
+ AvroReader avroReader,
+ long blockOffset,
+ long eventIndex)
+ {
+ _avroReader = avroReader;
+ BlockOffset = blockOffset;
+ EventIndex = eventIndex;
+ }
+
+ public virtual bool HasNext()
+ => _avroReader.HasNext();
+
+ public virtual async Task Next(
+ bool async,
+ CancellationToken cancellationToken = default)
+ {
+ Dictionary result;
+
+ if (!HasNext())
+ {
+ return null;
+ }
+
+ result = (Dictionary)await _avroReader.Next(async, cancellationToken).ConfigureAwait(false);
+ BlockOffset = _avroReader.BlockOffset;
+ EventIndex = _avroReader.ObjectIndex;
+ return new BlobChangeFeedEvent(result);
+ }
+
+ ///
+ /// Constructor for mocking. Do not use.
+ ///
+ internal Chunk() { }
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs
new file mode 100644
index 0000000000000..b400045f1a171
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs
@@ -0,0 +1,70 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System.IO;
+using Azure.Storage.Internal.Avro;
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+ internal class ChunkFactory
+ {
+ private readonly LazyLoadingBlobStreamFactory _lazyLoadingBlobStreamFactory;
+ private readonly AvroReaderFactory _avroReaderFactory;
+ private readonly BlobContainerClient _containerClient;
+
+ public ChunkFactory(
+ BlobContainerClient containerClient,
+ LazyLoadingBlobStreamFactory lazyLoadingBlobStreamFactory,
+ AvroReaderFactory avroReaderFactory)
+ {
+ _containerClient = containerClient;
+ _lazyLoadingBlobStreamFactory = lazyLoadingBlobStreamFactory;
+ _avroReaderFactory = avroReaderFactory;
+ }
+
+ public virtual Chunk BuildChunk(
+ string chunkPath,
+ long? blockOffset = default,
+ long? eventIndex = default)
+ {
+ BlobClient blobClient = _containerClient.GetBlobClient(chunkPath);
+ blockOffset ??= 0;
+ eventIndex ??= 0;
+ AvroReader avroReader;
+
+ Stream dataStream = _lazyLoadingBlobStreamFactory.BuildLazyLoadingBlobStream(
+ blobClient,
+ offset: blockOffset.Value,
+ blockSize: Constants.ChangeFeed.ChunkBlockDownloadSize);
+
+ // We aren't starting from the beginning of the Chunk
+ if (blockOffset != 0)
+ {
+ Stream headStream = _lazyLoadingBlobStreamFactory.BuildLazyLoadingBlobStream(
+ blobClient,
+ offset: 0,
+ blockSize: 3 * Constants.KB);
+
+ avroReader = _avroReaderFactory.BuildAvroReader(
+ dataStream,
+ headStream,
+ blockOffset.Value,
+ eventIndex.Value);
+ }
+ else
+ {
+ avroReader = _avroReaderFactory.BuildAvroReader(dataStream);
+ }
+
+ return new Chunk(
+ avroReader,
+ blockOffset.Value,
+ eventIndex.Value);
+ }
+
+ ///
+ /// Constructor for mocking.
+ ///
+ public ChunkFactory() { }
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs
new file mode 100644
index 0000000000000..9d2ac838acd1b
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs
@@ -0,0 +1,246 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.IO;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.Core.Pipeline;
+using Azure.Storage.Blobs.Models;
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+ internal class LazyLoadingBlobStream : Stream
+ {
+ ///
+ /// BlobClient to make download calls with.
+ ///
+ private readonly BlobClient _blobClient;
+
+ ///
+ /// The offset within the blob of the next block we will download.
+ ///
+ private long _offset;
+
+ ///
+ /// The number of bytes we'll download with each download call.
+ ///
+ private readonly long _blockSize;
+
+ ///
+ /// Underlying Stream.
+ ///
+ private Stream _stream;
+
+ ///
+ /// If this LazyLoadingBlobStream has been initalized.
+ ///
+ private bool _initalized;
+
+ ///
+ /// The number of bytes in the last download call.
+ ///
+ private long _lastDownloadBytes;
+
+ ///
+ /// The current length of the blob.
+ ///
+ private long _blobLength;
+
+ public LazyLoadingBlobStream(BlobClient blobClient, long offset, long blockSize)
+ {
+ _blobClient = blobClient;
+ _offset = offset;
+ _blockSize = blockSize;
+ _initalized = false;
+ }
+
+ ///
+ /// Constructor for mocking.
+ ///
+ public LazyLoadingBlobStream() { }
+
+ ///
+ public override int Read(
+ byte[] buffer,
+ int offset,
+ int count)
+ => ReadInternal(
+ async: false,
+ buffer,
+ offset,
+ count).EnsureCompleted();
+
+ ///
+ public override async Task ReadAsync(
+ byte[] buffer,
+ int offset,
+ int count,
+ CancellationToken cancellationToken)
+ => await ReadInternal(
+ async: true,
+ buffer,
+ offset,
+ count,
+ cancellationToken).ConfigureAwait(false);
+
+ ///
+ /// Initalizes this LazyLoadingBlobStream.
+ /// The number of bytes that were downloaded in the first download request.
+ ///
+ private async Task Initalize(bool async, CancellationToken cancellationToken)
+ {
+ await DownloadBlock(async, cancellationToken).ConfigureAwait(false);
+ _initalized = true;
+ }
+
+ ///
+ /// Downloads the next block.
+ /// Number of bytes that were downloaded
+ ///
+ private async Task DownloadBlock(bool async, CancellationToken cancellationToken)
+ {
+ Response response;
+ HttpRange range = new HttpRange(_offset, _blockSize);
+
+ response = async
+ ? await _blobClient.DownloadAsync(range, cancellationToken: cancellationToken).ConfigureAwait(false)
+ : _blobClient.Download(range);
+ _stream = response.Value.Content;
+ _offset += response.Value.ContentLength;
+ _lastDownloadBytes = response.Value.ContentLength;
+ _blobLength = GetBlobLength(response);
+ }
+
+ ///
+ /// Shared sync and async Read implementation.
+ ///
+ private async Task ReadInternal(
+ bool async,
+ byte[] buffer,
+ int offset,
+ int count,
+ CancellationToken cancellationToken = default)
+ {
+ ValidateReadParameters(buffer, offset, count);
+
+ if (!_initalized)
+ {
+ await Initalize(async, cancellationToken: cancellationToken).ConfigureAwait(false);
+ if (_lastDownloadBytes == 0)
+ {
+ return 0;
+ }
+ }
+
+ int totalCopiedBytes = 0;
+ do
+ {
+ int copiedBytes = async
+ ? await _stream.ReadAsync(buffer, offset, count).ConfigureAwait(false)
+ : _stream.Read(buffer, offset, count);
+ offset += copiedBytes;
+ count -= copiedBytes;
+ totalCopiedBytes += copiedBytes;
+
+ // We've run out of bytes in the current block.
+ if (copiedBytes == 0)
+ {
+ // We hit the end of the blob with the last download call.
+ if (_offset == _blobLength)
+ {
+ return totalCopiedBytes;
+ }
+
+ // Download the next block
+ else
+ {
+ await DownloadBlock(async, cancellationToken).ConfigureAwait(false);
+ }
+ }
+ }
+ while (count > 0);
+ return totalCopiedBytes;
+ }
+
+ private static void ValidateReadParameters(byte[] buffer, int offset, int count)
+ {
+ if (buffer == null)
+ {
+ throw new ArgumentNullException($"{nameof(buffer)}", $"{nameof(buffer)} cannot be null.");
+ }
+
+ if (offset < 0)
+ {
+ throw new ArgumentOutOfRangeException($"{nameof(offset)} cannot be less than 0.");
+ }
+
+ if (offset > buffer.Length)
+ {
+ throw new ArgumentOutOfRangeException($"{nameof(offset)} cannot exceed {nameof(buffer)} length.");
+ }
+
+ if (count < 0)
+ {
+ throw new ArgumentOutOfRangeException($"{nameof(count)} cannot be less than 0.");
+ }
+
+ if (offset + count > buffer.Length)
+ {
+ throw new ArgumentOutOfRangeException($"{nameof(offset)} + {nameof(count)} cannot exceed {nameof(buffer)} length.");
+ }
+ }
+
+ private static long GetBlobLength(Response response)
+ {
+ string lengthString = response.Value.Details.ContentRange;
+ string[] split = lengthString.Split('/');
+ return Convert.ToInt64(split[1], CultureInfo.InvariantCulture);
+ }
+
+ ///
+ public override bool CanRead => true;
+
+ ///
+ public override bool CanSeek => false;
+
+ ///
+ public override bool CanWrite => throw new NotSupportedException();
+
+ public override long Length => throw new NotSupportedException();
+
+ ///
+ public override long Position {
+ get => _stream.Position;
+ set => throw new NotSupportedException();
+ }
+
+ ///
+ public override void Flush()
+ {
+ }
+
+ ///
+ public override long Seek(long offset, SeekOrigin origin)
+ {
+ throw new NotSupportedException();
+ }
+
+ ///
+ public override void SetLength(long value)
+ {
+ throw new NotSupportedException();
+ }
+
+ ///
+ public override void Write(byte[] buffer, int offset, int count)
+ {
+ throw new NotSupportedException();
+ }
+
+ protected override void Dispose(bool disposing) => _stream.Dispose();
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStreamFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStreamFactory.cs
new file mode 100644
index 0000000000000..73fd0a94b896e
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStreamFactory.cs
@@ -0,0 +1,21 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+ ///
+ /// Creates LazyLoadingBlobStreams. Allows us to inject mock
+ /// LazyLoadingBlobStreams in the Chunk unit tests.
+ ///
+ internal class LazyLoadingBlobStreamFactory
+ {
+ public virtual LazyLoadingBlobStream BuildLazyLoadingBlobStream(
+ BlobClient blobClient,
+ long offset,
+ long blockSize)
+ => new LazyLoadingBlobStream(
+ blobClient,
+ offset,
+ blockSize);
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs
new file mode 100644
index 0000000000000..ea0f595e30258
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs
@@ -0,0 +1,93 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Globalization;
+using System.Text;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Models
+{
+ ///
+ /// BlobChangeFeedEvent.
+ ///
+ public class BlobChangeFeedEvent
+ {
+ ///
+ /// Internal constructor.
+ ///
+ internal BlobChangeFeedEvent(Dictionary record)
+ {
+ Topic = (string)record[Constants.ChangeFeed.Event.Topic];
+ Subject = (string)record[Constants.ChangeFeed.Event.Subject];
+ EventType = ToBlobChangeFeedEventType((string)record[Constants.ChangeFeed.Event.EventType]);
+ EventTime = DateTimeOffset.Parse((string)record[Constants.ChangeFeed.Event.EventTime], CultureInfo.InvariantCulture);
+ Id = Guid.Parse((string)record[Constants.ChangeFeed.Event.EventId]);
+ EventData = new BlobChangeFeedEventData((Dictionary)record[Constants.ChangeFeed.Event.Data]);
+ record.TryGetValue(Constants.ChangeFeed.Event.DataVersion, out object dataVersion);
+ DataVersion = (long?)dataVersion;
+ record.TryGetValue(Constants.ChangeFeed.Event.MetadataVersion, out object metadataVersion);
+ MetadataVersion = (string)metadataVersion;
+ }
+
+ internal BlobChangeFeedEvent() { }
+
+ ///
+ /// Full resource path to the event source. This field is not writeable. Event Grid provides this value.
+ ///
+ public string Topic { get; internal set; }
+
+ ///
+ /// Publisher-defined path to the event subject.
+ ///
+ public string Subject { get; internal set; }
+
+ ///
+ /// One of the registered event types for this event source.
+ ///
+ public BlobChangeFeedEventType EventType { get; internal set; }
+
+ ///
+ /// The time the event is generated based on the provider's UTC time.
+ ///
+ public DateTimeOffset EventTime { get; internal set; }
+
+ ///
+ /// Unique identifier for the event.
+ ///
+ public Guid Id { get; internal set; }
+
+ ///
+ /// Blob storage event data.
+ ///
+ public BlobChangeFeedEventData EventData { get; internal set; }
+
+ ///
+ /// The schema version of the data object. The publisher defines the schema version.
+ ///
+ public long? DataVersion { get; internal set; }
+
+ ///
+ /// The schema version of the event metadata. Event Grid defines the schema of the top-level properties.
+ /// Event Grid provides this value.
+ ///
+ public string MetadataVersion { get; internal set; }
+
+ ///
+ public override string ToString() => $"{EventTime}: {EventType} {Subject} ({EventData?.ToString() ?? "Unknown Event"})";
+
+ private static BlobChangeFeedEventType ToBlobChangeFeedEventType(string s)
+ {
+ switch (s)
+ {
+ case "BlobCreated":
+ return BlobChangeFeedEventType.BlobCreated;
+ case "BlobDeleted":
+ return BlobChangeFeedEventType.BlobDeleted;
+ default:
+ return default;
+ }
+ }
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs
new file mode 100644
index 0000000000000..8edb9b1a2d4d0
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs
@@ -0,0 +1,129 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using Azure.Storage.Blobs.Models;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Models
+{
+ ///
+ /// BlobChangeFeedEventData.
+ ///
+ public class BlobChangeFeedEventData
+ {
+ ///
+ /// Internal constructor.
+ ///
+ internal BlobChangeFeedEventData() { }
+
+ internal BlobChangeFeedEventData(Dictionary record)
+ {
+ Api = ((string)record[Constants.ChangeFeed.EventData.Api]);
+ ClientRequestId = Guid.Parse((string)record[Constants.ChangeFeed.EventData.ClientRequestId]);
+ RequestId = Guid.Parse((string)record[Constants.ChangeFeed.EventData.RequestId]);
+ ETag = new ETag((string)record[Constants.ChangeFeed.EventData.Etag]);
+ ContentType = (string)record[Constants.ChangeFeed.EventData.ContentType];
+ ContentLength = (long)record[Constants.ChangeFeed.EventData.ContentLength];
+ BlobType = ((string)record[Constants.ChangeFeed.EventData.BlobType]) switch
+ {
+ Constants.ChangeFeed.EventData.BlockBlob => BlobType.Block,
+ Constants.ChangeFeed.EventData.PageBlob => BlobType.Page,
+ Constants.ChangeFeed.EventData.AppendBlob => BlobType.Append,
+ _ => default
+ };
+ record.TryGetValue(Constants.ChangeFeed.EventData.ContentOffset, out object contentOffset);
+ ContentOffset = (long?)contentOffset;
+ record.TryGetValue(Constants.ChangeFeed.EventData.DestinationUrl, out object destinationUrl);
+ DestinationUri = !string.IsNullOrEmpty((string)destinationUrl) ? new Uri((string)destinationUrl) : null;
+ record.TryGetValue(Constants.ChangeFeed.EventData.SourceUrl, out object sourceUrl);
+ SourceUri = !string.IsNullOrEmpty((string)sourceUrl) ? new Uri((string)sourceUrl) : null;
+ record.TryGetValue(Constants.ChangeFeed.EventData.Url, out object url);
+ Uri = !string.IsNullOrEmpty((string)url) ? new Uri((string)url) : null;
+ record.TryGetValue(Constants.ChangeFeed.EventData.Recursive, out object recursive);
+ Recursive = (bool?)recursive;
+ Sequencer = (string)record[Constants.ChangeFeed.EventData.Sequencer];
+ }
+
+ ///
+ /// The operation that triggered the event.
+ ///
+ public string Api { get; internal set; }
+
+ ///
+ /// A client-provided request id for the storage API operation. This id can be used to correlate to Azure Storage
+ /// diagnostic logs using the "client-request-id" field in the logs, and can be provided in client requests using
+ /// the "x-ms-client-request-id" header.
+ ///
+ public Guid ClientRequestId { get; internal set; }
+
+ ///
+ /// Service-generated request id for the storage API operation. Can be used to correlate to Azure Storage diagnostic
+ /// logs using the "request-id-header" field in the logs and is returned from initiating API call in the
+ /// 'x-ms-request-id' header.
+ ///
+ public Guid RequestId { get; internal set; }
+
+ ///
+ /// The value that you can use to perform operations conditionally.
+ ///
+ public ETag ETag { get; internal set; }
+
+ ///
+ /// The content type specified for the blob.
+ ///
+ public string ContentType { get; internal set; }
+
+ ///
+ /// The size of the blob in bytes.
+ ///
+ public long ContentLength { get; internal set; }
+
+ ///
+ /// The type of blob. Valid values are either BlockBlob or PageBlob.
+ ///
+ public BlobType BlobType { get; internal set; }
+
+ ///
+ /// The offset in bytes of a write operation taken at the point where the event-triggering application completed
+ /// writing to the file.
+ /// Appears only for events triggered on blob storage accounts that have a hierarchical namespace.
+ ///
+ public long? ContentOffset { get; internal set; }
+
+ ///
+ /// The url of the file that will exist after the operation completes. For example, if a file is renamed,
+ /// the destinationUrl property contains the url of the new file name.
+ /// Appears only for events triggered on blob storage accounts that have a hierarchical namespace.
+ ///
+ public Uri DestinationUri { get; internal set; }
+
+ ///
+ /// The url of the file that exists prior to the operation. For example, if a file is renamed, the sourceUrl
+ /// contains the url of the original file name prior to the rename operation.
+ /// Appears only for events triggered on blob storage accounts that have a hierarchical namespace.
+ ///
+ public Uri SourceUri { get; internal set; }
+
+ ///
+ /// The path to the blob.
+ /// If the client uses a Blob REST API, then the url has this structure:
+ /// (storage-account-name).blob.core.windows.net/(container-name)/(file-name)
+ /// If the client uses a Data Lake Storage REST API, then the url has this structure:
+ /// (storage-account-name).dfs.core.windows.net/(file-system-name)/(file-name).
+ ///
+ public Uri Uri { get; internal set; }
+
+ ///
+ /// True to perform the operation on all child directories; otherwise False.
+ /// Appears only for events triggered on blob storage accounts that have a hierarchical namespace.
+ ///
+ public bool? Recursive { get; internal set; }
+
+ ///
+ /// An opaque string value representing the logical sequence of events for any particular blob name.
+ /// Users can use standard string comparison to understand the relative sequence of two events on the same blob name.
+ ///
+ public string Sequencer { get; internal set; }
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs
new file mode 100644
index 0000000000000..25077943c44ba
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs
@@ -0,0 +1,30 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Models
+{
+ internal class BlobChangeFeedEventPage : Page
+ {
+ public override IReadOnlyList Values { get; }
+ public override string ContinuationToken { get; }
+ public override Response GetRawResponse() => null;
+ //private Response _raw;
+
+ public BlobChangeFeedEventPage() { }
+
+ public BlobChangeFeedEventPage(List events, string continuationToken)
+ {
+ Values = events;
+ ContinuationToken = continuationToken;
+ }
+
+ public static BlobChangeFeedEventPage Empty()
+ => new BlobChangeFeedEventPage(
+ new List(),
+ null);
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventType.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventType.cs
new file mode 100644
index 0000000000000..c14fdef71666c
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventType.cs
@@ -0,0 +1,21 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+namespace Azure.Storage.Blobs.ChangeFeed.Models
+{
+ ///
+ /// BlobChangeFeedEventType.
+ ///
    public enum BlobChangeFeedEventType
    {
        /// <summary>
        /// Blob created.
        /// </summary>
        BlobCreated = 0,

        /// <summary>
        /// Blob deleted.
        /// </summary>
        BlobDeleted = 1,
    }
+}
\ No newline at end of file
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs
new file mode 100644
index 0000000000000..fd571354030ee
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs
@@ -0,0 +1,72 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using Azure.Storage.Blobs.Models;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Models
+{
+ ///
+ /// BlobChangeFeedModelFactory for building mock objects.
+ ///
+ public static class BlobChangeFeedModelFactory
+ {
+ ///
+ /// Creates a new BlobChangeFeedEvent instance for mocking.
+ ///
+ public static BlobChangeFeedEvent BlobChangeFeedEvent(
+ string topic,
+ string subject,
+ BlobChangeFeedEventType eventType,
+ DateTimeOffset eventTime,
+ Guid id,
+ BlobChangeFeedEventData eventData,
+ long dataVersion,
+ string metadataVersion)
+ => new BlobChangeFeedEvent
+ {
+ Topic = topic,
+ Subject = subject,
+ EventType = eventType,
+ EventTime = eventTime,
+ Id = id,
+ EventData = eventData,
+ DataVersion = dataVersion,
+ MetadataVersion = metadataVersion
+ };
+
+ ///
+ /// Creates a new BlobChangeFeedEventData instance for mocking.
+ ///
+ public static BlobChangeFeedEventData BlobChangeFeedEventData(
+ string api,
+ Guid clientRequestId,
+ Guid requestId,
+ ETag eTag,
+ string contentType,
+ long contentLength,
+ BlobType blobType,
+ long contentOffset,
+ Uri destinationUri,
+ Uri sourceUri,
+ Uri uri,
+ bool recursive,
+ string sequencer)
+ => new BlobChangeFeedEventData
+ {
+ Api = api,
+ ClientRequestId = clientRequestId,
+ RequestId = requestId,
+ ETag = eTag,
+ ContentType = contentType,
+ ContentLength = contentLength,
+ BlobType = blobType,
+ ContentOffset = contentOffset,
+ DestinationUri = destinationUri,
+ SourceUri = sourceUri,
+ Uri = uri,
+ Recursive = recursive,
+ Sequencer = sequencer
+ };
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ChangeFeedCursor.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ChangeFeedCursor.cs
new file mode 100644
index 0000000000000..8036b2427bb36
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ChangeFeedCursor.cs
@@ -0,0 +1,48 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Models
+{
+ ///
+ /// BlobChangeFeedCursor.
+ ///
    internal class ChangeFeedCursor
    {
        /// <summary>
        /// CursorVersion.  Schema version of this cursor; set to 1 by the
        /// internal constructor.
        /// </summary>
        public int CursorVersion { get; set; }

        /// <summary>
        /// UrlHash.  Hash of the container URL the cursor was created against;
        /// used to reject cursors replayed against a different container.
        /// </summary>
        public long UrlHash { get; set; }

        /// <summary>
        /// EndDateTime.  Optional end time; presumably bounds how far the
        /// change feed iteration proceeds — confirm against ChangeFeed usage.
        /// </summary>
        public DateTimeOffset? EndTime { get; set; }

        /// <summary>
        /// The Segment Cursor for the current segment.
        /// </summary>
        public SegmentCursor CurrentSegmentCursor { get; set; }

        internal ChangeFeedCursor(
            long urlHash,
            DateTimeOffset? endDateTime,
            SegmentCursor currentSegmentCursor)
        {
            CursorVersion = 1;
            UrlHash = urlHash;
            EndTime = endDateTime;
            CurrentSegmentCursor = currentSegmentCursor;
        }

        // Parameterless constructor — presumably required for JSON
        // (de)serialization of the cursor; confirm against the serializer used.
        public ChangeFeedCursor() { }
    }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/SegmentCursor.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/SegmentCursor.cs
new file mode 100644
index 0000000000000..a732ce776dfe1
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/SegmentCursor.cs
@@ -0,0 +1,42 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Models
+{
+ ///
+ /// Segment Cursor.
+ ///
+ internal class SegmentCursor
+ {
+ ///
+ /// Shard Cursors.
+ ///
+ public List ShardCursors { get; set; }
+
+ ///
+ /// Index of the current Shard.
+ ///
+ public int ShardIndex { get; set; }
+
+ ///
+ /// The DateTimeOffset of the Segment.
+ ///
+ public DateTimeOffset SegmentTime { get; set; }
+
+ internal SegmentCursor(
+ DateTimeOffset segmentDateTime,
+ List shardCursors,
+ int shardIndex)
+ {
+ SegmentTime = segmentDateTime;
+ ShardCursors = shardCursors;
+ ShardIndex = shardIndex;
+ }
+
+ public SegmentCursor() { }
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ShardCursor.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ShardCursor.cs
new file mode 100644
index 0000000000000..b7ff67c17ec21
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ShardCursor.cs
@@ -0,0 +1,44 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Models
+{
    internal class ShardCursor
    {
        /// <summary>
        /// Index of the current Chunk.
        /// </summary>
        public long ChunkIndex { get; set; }

        /// <summary>
        /// The byte offset of the beginning of
        /// the current Avro block.
        /// </summary>
        public long BlockOffset { get; set; }

        /// <summary>
        /// The index of the current event within
        /// the current Avro block.
        /// </summary>
        public long EventIndex { get; set; }

        internal ShardCursor(
            long chunkIndex,
            long blockOffset,
            long eventIndex)
        {
            ChunkIndex = chunkIndex;
            BlockOffset = blockOffset;
            EventIndex = eventIndex;
        }

        /// <summary>
        /// Parameterless constructor — presumably for JSON (de)serialization
        /// and mocking; confirm against the serializer used.
        /// </summary>
        public ShardCursor() { }
    }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs
new file mode 100644
index 0000000000000..a8fa1955e904e
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs
@@ -0,0 +1,131 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+using Azure.Storage.Blobs.Models;
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+ internal class Segment
+ {
+ ///
+ /// If this Segment is finalized.
+ ///
+ public virtual bool Finalized { get; private set; }
+
+ ///
+ /// The time (to the nearest hour) associated with this Segment.
+ ///
+ public DateTimeOffset DateTime { get; private set; }
+
+ ///
+ /// The Shards associated with this Segment.
+ ///
+ private readonly List _shards;
+
+ ///
+ /// The Shards we have finished reading from.
+ ///
+ private readonly HashSet _finishedShards;
+
+ ///
+ /// The index of the Shard we will return the next event from.
+ ///
+ private int _shardIndex;
+
+ public Segment(
+ List shards,
+ int shardIndex,
+ DateTimeOffset dateTime,
+ bool finalized)
+ {
+ _shards = shards;
+ _shardIndex = shardIndex;
+ DateTime = dateTime;
+ Finalized = finalized;
+ _finishedShards = new HashSet();
+ }
+
+ public virtual SegmentCursor GetCursor()
+ {
+ List shardCursors = new List();
+ foreach (Shard shard in _shards)
+ {
+ shardCursors.Add(shard.GetCursor());
+ }
+ return new SegmentCursor(
+ segmentDateTime: DateTime,
+ shardCursors: shardCursors,
+ shardIndex: _shardIndex);
+ }
+
+ public virtual async Task> GetPage(
+ bool async,
+ int? pageSize,
+ CancellationToken cancellationToken = default)
+ {
+ List changeFeedEventList = new List();
+
+ if (!HasNext())
+ {
+ throw new InvalidOperationException("Segment doesn't have any more events");
+ }
+
+ int i = 0;
+ while (i < pageSize && _shards.Count > 0)
+ {
+ // If this Shard is finished, skip it.
+ if (_finishedShards.Contains(_shardIndex))
+ {
+ _shardIndex++;
+
+ if (_shardIndex == _shards.Count)
+ {
+ _shardIndex = 0;
+ }
+
+ continue;
+ }
+
+ Shard currentShard = _shards[_shardIndex];
+
+ BlobChangeFeedEvent changeFeedEvent = await currentShard.Next(async, cancellationToken).ConfigureAwait(false);
+
+ changeFeedEventList.Add(changeFeedEvent);
+
+ // If the current shard is completed, remove it from _shards
+ if (!currentShard.HasNext())
+ {
+ _finishedShards.Add(_shardIndex);
+ }
+
+ i++;
+ _shardIndex++;
+ if (_shardIndex >= _shards.Count)
+ {
+ _shardIndex = 0;
+ }
+
+ // If all the Shards are finished, we need to break out early.
+ if (_finishedShards.Count == _shards.Count)
+ {
+ break;
+ }
+ }
+
+ return changeFeedEventList;
+ }
+
+ public virtual bool HasNext()
+ => _finishedShards.Count < _shards.Count;
+
+ ///
+ /// Constructor for mocking.
+ ///
+ public Segment() { }
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs
new file mode 100644
index 0000000000000..11849d500c196
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs
@@ -0,0 +1,94 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.Text.Json;
+using System.Threading.Tasks;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+using Azure.Storage.Blobs.Models;
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+ internal class SegmentFactory
+ {
+ private readonly BlobContainerClient _containerClient;
+ private readonly ShardFactory _shardFactory;
+
+ ///
+ /// Constructor for mocking.
+ ///
+ public SegmentFactory() { }
+
+ public SegmentFactory(
+ BlobContainerClient containerClient,
+ ShardFactory shardFactory)
+ {
+ _containerClient = containerClient;
+ _shardFactory = shardFactory;
+ }
+
+#pragma warning disable CA1822 // Does not acces instance data can be marked static.
+ public virtual async Task BuildSegment(
+#pragma warning restore CA1822 // Can't mock static methods in MOQ.
+ bool async,
+ string manifestPath,
+ SegmentCursor cursor = default)
+ {
+ // Models we need for later
+ List shards = new List();
+ DateTimeOffset dateTime = manifestPath.ToDateTimeOffset().Value;
+ int shardIndex = cursor?.ShardIndex ?? 0;
+
+ // Download segment manifest
+ BlobClient blobClient = _containerClient.GetBlobClient(manifestPath);
+ BlobDownloadInfo blobDownloadInfo;
+
+ if (async)
+ {
+ blobDownloadInfo = await blobClient.DownloadAsync().ConfigureAwait(false);
+ }
+ else
+ {
+ blobDownloadInfo = blobClient.Download();
+ }
+
+ // Parse segment manifest
+ JsonDocument jsonManifest;
+
+ if (async)
+ {
+ jsonManifest = await JsonDocument.ParseAsync(blobDownloadInfo.Content).ConfigureAwait(false);
+ }
+ else
+ {
+ jsonManifest = JsonDocument.Parse(blobDownloadInfo.Content);
+ }
+
+ // Initalized Finalized field
+ string statusString = jsonManifest.RootElement.GetProperty("status").GetString();
+ bool finalized = statusString == "Finalized";
+
+ int i = 0;
+ foreach (JsonElement shardJsonElement in jsonManifest.RootElement.GetProperty("chunkFilePaths").EnumerateArray())
+ {
+ string shardPath = shardJsonElement.ToString().Substring("$blobchangefeed/".Length);
+ Shard shard = await _shardFactory.BuildShard(
+ async,
+ shardPath,
+ cursor?.ShardCursors?[i])
+ .ConfigureAwait(false);
+
+ shards.Add(shard);
+ i++;
+ }
+
+ return new Segment(
+ shards,
+ shardIndex,
+ dateTime,
+ finalized);
+ }
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs
new file mode 100644
index 0000000000000..19109ad8a28fe
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs
@@ -0,0 +1,103 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using Azure.Storage.Blobs.Models;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+using System.Threading;
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+    internal class Shard
+    {
+        /// <summary>
+        /// Container Client for listing Chunks.
+        /// </summary>
+        private readonly BlobContainerClient _containerClient;
+
+        /// <summary>
+        /// ChunkFactory, used to build the next Chunk when the current one is exhausted.
+        /// </summary>
+        private readonly ChunkFactory _chunkFactory;
+
+        /// <summary>
+        /// Queue of the paths to Chunks we haven't processed.
+        /// </summary>
+        private readonly Queue _chunks;
+
+        /// <summary>
+        /// The Chunk we are currently processing.
+        /// </summary>
+        private Chunk _currentChunk;
+
+        /// <summary>
+        /// The index of the Chunk we are processing.
+        /// </summary>
+        private long _chunkIndex;
+
+        /// <summary>
+        /// Gets the cursor for this Shard: the current chunk index plus the
+        /// current Chunk's block offset and event index.
+        /// </summary>
+        public virtual ShardCursor GetCursor()
+            => new ShardCursor(
+                _chunkIndex,
+                _currentChunk.BlockOffset,
+                _currentChunk.EventIndex);
+
+        /// <summary>
+        /// If this Shard has a next event, i.e. there are queued Chunks remaining
+        /// or the current Chunk still has events.
+        /// </summary>
+        public virtual bool HasNext()
+            => _chunks.Count > 0 || _currentChunk.HasNext();
+
+        /// <summary>
+        /// Gets the next event from the current Chunk, advancing to the next
+        /// queued Chunk once the current one is exhausted.
+        /// </summary>
+        /// <param name="async">Whether to invoke the operation asynchronously.</param>
+        /// <param name="cancellationToken">Token to cancel the operation.</param>
+        /// <exception cref="InvalidOperationException">Thrown when the Shard has no more events.</exception>
+        public virtual async Task Next(
+            bool async,
+            CancellationToken cancellationToken = default)
+        {
+            if (!HasNext())
+            {
+                throw new InvalidOperationException("Shard doesn't have any more events");
+            }
+
+            BlobChangeFeedEvent changeFeedEvent;
+
+            changeFeedEvent = await _currentChunk.Next(async, cancellationToken).ConfigureAwait(false);
+
+            // Remove currentChunk if it doesn't have another event.
+            if (!_currentChunk.HasNext() && _chunks.Count > 0)
+            {
+                _currentChunk = _chunkFactory.BuildChunk(
+                    _chunks.Dequeue());
+                _chunkIndex++;
+            }
+            return changeFeedEvent;
+        }
+
+        /// <summary>
+        /// Constructor for use by the ShardFactory.
+        /// </summary>
+        public Shard(
+            BlobContainerClient containerClient,
+            ChunkFactory chunkFactory,
+            Queue chunks,
+            Chunk currentChunk,
+            long chunkIndex)
+        {
+            _containerClient = containerClient;
+            _chunkFactory = chunkFactory;
+            _chunks = chunks;
+            _currentChunk = currentChunk;
+            _chunkIndex = chunkIndex;
+        }
+
+        /// <summary>
+        /// Constructor for mocking.
+        /// </summary>
+        internal Shard() { }
+    }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs
new file mode 100644
index 0000000000000..c1185171b1cbe
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs
@@ -0,0 +1,93 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+using Azure.Storage.Blobs.Models;
+
+namespace Azure.Storage.Blobs.ChangeFeed
+{
+    /// <summary>
+    /// Builds a Shard.
+    /// </summary>
+    internal class ShardFactory
+    {
+        private readonly ChunkFactory _chunkFactory;
+        private readonly BlobContainerClient _containerClient;
+
+        public ShardFactory(
+            BlobContainerClient containerClient,
+            ChunkFactory chunkFactory)
+        {
+            _containerClient = containerClient;
+            _chunkFactory = chunkFactory;
+        }
+
+        /// <summary>
+        /// Constructor for mocking.
+        /// </summary>
+        public ShardFactory() { }
+
+#pragma warning disable CA1822 // Does not access instance data, can be marked static.
+        /// <summary>
+        /// Lists the Chunk blobs under the shard path, fast forwards past Chunks
+        /// the cursor has already consumed (if one is provided), and returns the
+        /// assembled Shard positioned at the current Chunk.
+        /// </summary>
+        /// <param name="async">Whether to invoke the listing asynchronously.</param>
+        /// <param name="shardPath">Blob name prefix under which this Shard's Chunks live.</param>
+        /// <param name="shardCursor">Optional cursor to resume the Shard from a previously saved position.</param>
+        public virtual async Task BuildShard(
+#pragma warning restore CA1822 // Can't mock static methods in MOQ.
+            bool async,
+            string shardPath,
+            ShardCursor shardCursor = default)
+        {
+            // Models we'll need later.
+            Queue chunks = new Queue();
+            long chunkIndex = shardCursor?.ChunkIndex ?? 0;
+            long blockOffset = shardCursor?.BlockOffset ?? 0;
+            long eventIndex = shardCursor?.EventIndex ?? 0;
+
+            // Get Chunks, skipping virtual directory prefixes and keeping only blobs.
+            if (async)
+            {
+                await foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchyAsync(
+                    prefix: shardPath).ConfigureAwait(false))
+                {
+                    if (blobHierarchyItem.IsPrefix)
+                        continue;
+
+                    chunks.Enqueue(blobHierarchyItem.Blob.Name);
+                }
+            }
+            else
+            {
+                foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchy(
+                    prefix: shardPath))
+                {
+                    if (blobHierarchyItem.IsPrefix)
+                        continue;
+
+                    chunks.Enqueue(blobHierarchyItem.Blob.Name);
+                }
+            }
+
+            // Fast forward to current Chunk.
+            if (chunkIndex > 0)
+            {
+                for (int i = 0; i < chunkIndex; i++)
+                {
+                    chunks.Dequeue();
+                }
+            }
+
+            // Resume the current Chunk from the cursor's block offset and event index
+            // (both 0 when no cursor was supplied).
+            Chunk currentChunk = _chunkFactory.BuildChunk(
+                chunks.Dequeue(),
+                blockOffset,
+                eventIndex);
+
+            return new Shard(
+                _containerClient,
+                _chunkFactory,
+                chunks,
+                currentChunk,
+                chunkIndex);
+        }
+    }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Azure.Storage.Blobs.ChangeFeed.Tests.csproj b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Azure.Storage.Blobs.ChangeFeed.Tests.csproj
new file mode 100644
index 0000000000000..eb2fd04f30efd
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Azure.Storage.Blobs.ChangeFeed.Tests.csproj
@@ -0,0 +1,25 @@
+
+
+ $(RequiredTargetFrameworks)
+
+
+ Microsoft Azure.Storage.Blobs.ChangeFeed client library tests
+ false
+
+
+
+
+
+
+
+
+
+ PreserveNewest
+
+
+
+
+ PreserveNewest
+
+
+
\ No newline at end of file
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs
new file mode 100644
index 0000000000000..d188628f4bb20
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs
@@ -0,0 +1,89 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.Threading.Tasks;
+using Azure.Core.TestFramework;
+using Azure.Storage.Blobs;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+using NUnit.Framework;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Tests
+{
+    public class BlobChangeFeedAsyncPagableTests : ChangeFeedTestBase
+    {
+        public BlobChangeFeedAsyncPagableTests(bool async)
+            : base(async, null /* RecordedTestMode.Record /* to re-record */)
+        {
+        }
+
+        /// <summary>
+        /// Smoke test: enumerates every change feed event and writes it to the console.
+        /// Requires a live account with change feed enabled, so it is ignored by default.
+        /// </summary>
+        [Test]
+        [Ignore("")]
+        public async Task Test()
+        {
+            BlobServiceClient service = GetServiceClient_SharedKey();
+            BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient();
+            BlobChangeFeedAsyncPagable blobChangeFeedAsyncPagable
+                = blobChangeFeedClient.GetChangesAsync();
+            IList list = await blobChangeFeedAsyncPagable.ToListAsync();
+            foreach (BlobChangeFeedEvent e in list)
+            {
+                Console.WriteLine(e);
+            }
+        }
+
+        /// <summary>
+        /// Verifies that when a page size hint is supplied, every page except the
+        /// last contains exactly that many events. Ignored by default (live-only).
+        /// </summary>
+        [Test]
+        [Ignore("")]
+        public async Task PageSizeTest()
+        {
+            int pageSize = 100;
+            BlobServiceClient service = GetServiceClient_SharedKey();
+            BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient();
+            IAsyncEnumerator> asyncEnumerator
+                = blobChangeFeedClient.GetChangesAsync().AsPages(pageSizeHint: pageSize).GetAsyncEnumerator();
+            List pageSizes = new List();
+            while (await asyncEnumerator.MoveNextAsync())
+            {
+                pageSizes.Add(asyncEnumerator.Current.Values.Count);
+            }
+
+            // All pages except the last should have a count == pageSize.
+            for (int i = 0; i < pageSizes.Count - 1; i++)
+            {
+                Assert.AreEqual(pageSize, pageSizes[i]);
+            }
+        }
+
+        /// <summary>
+        /// Takes the continuation token from the first page and resumes iteration
+        /// with a new pageable built from that token. Ignored by default (live-only).
+        /// </summary>
+        [Test]
+        [Ignore("")]
+        public async Task CursorTest()
+        {
+            BlobServiceClient service = GetServiceClient_SharedKey();
+            BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient();
+            BlobChangeFeedAsyncPagable blobChangeFeedAsyncPagable
+                = blobChangeFeedClient.GetChangesAsync();
+            IAsyncEnumerable> asyncEnumerable = blobChangeFeedAsyncPagable.AsPages(pageSizeHint: 500);
+            Page page = await asyncEnumerable.FirstAsync();
+            foreach (BlobChangeFeedEvent changeFeedEvent in page.Values)
+            {
+                Console.WriteLine(changeFeedEvent);
+            }
+
+            Console.WriteLine("break");
+
+            string continuation = page.ContinuationToken;
+
+            // Resume from the saved continuation token and drain the remainder.
+            BlobChangeFeedAsyncPagable cursorBlobChangeFeedAsyncPagable
+                = blobChangeFeedClient.GetChangesAsync(continuation);
+
+            IList list = await cursorBlobChangeFeedAsyncPagable.ToListAsync();
+            foreach (BlobChangeFeedEvent e in list)
+            {
+                Console.WriteLine(e);
+            }
+
+        }
+    }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs
new file mode 100644
index 0000000000000..631633df5223d
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs
@@ -0,0 +1,157 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.Threading.Tasks;
+using Azure.Core;
+using Azure.Storage.Blobs.Models;
+using Moq;
+using NUnit.Framework;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Tests
+{
+    public class BlobChangeFeedExtensionsTests : ChangeFeedTestBase
+    {
+        public BlobChangeFeedExtensionsTests(bool async)
+            : base(async, null /* RecordedTestMode.Record /* to re-record */)
+        {
+        }
+
+        /// <summary>
+        /// ToDateTimeOffset should parse segment paths at every truncation level
+        /// (hour, day, month, year), default missing components, and return null
+        /// for a null input string.
+        /// </summary>
+        [Test]
+        public void ToDateTimeOffsetTests()
+        {
+            Assert.AreEqual(
+                new DateTimeOffset(2019, 11, 2, 17, 0, 0, TimeSpan.Zero),
+                "idx/segments/2019/11/02/1700/meta.json".ToDateTimeOffset());
+
+            Assert.AreEqual(
+                new DateTimeOffset(2019, 11, 2, 17, 0, 0, TimeSpan.Zero),
+                "idx/segments/2019/11/02/1700/".ToDateTimeOffset());
+
+            Assert.AreEqual(
+                new DateTimeOffset(2019, 11, 2, 17, 0, 0, TimeSpan.Zero),
+                "idx/segments/2019/11/02/1700".ToDateTimeOffset());
+
+            Assert.AreEqual(
+                new DateTimeOffset(2019, 11, 2, 0, 0, 0, TimeSpan.Zero),
+                "idx/segments/2019/11/02/".ToDateTimeOffset());
+
+            Assert.AreEqual(
+                new DateTimeOffset(2019, 11, 2, 0, 0, 0, TimeSpan.Zero),
+                "idx/segments/2019/11/02".ToDateTimeOffset());
+
+            Assert.AreEqual(
+                new DateTimeOffset(2019, 11, 1, 0, 0, 0, TimeSpan.Zero),
+                "idx/segments/2019/11/".ToDateTimeOffset());
+
+            Assert.AreEqual(
+                new DateTimeOffset(2019, 11, 1, 0, 0, 0, TimeSpan.Zero),
+                "idx/segments/2019/11".ToDateTimeOffset());
+
+            Assert.AreEqual(
+                new DateTimeOffset(2019, 1, 1, 0, 0, 0, TimeSpan.Zero),
+                "idx/segments/2019/".ToDateTimeOffset());
+
+            Assert.AreEqual(
+                new DateTimeOffset(2019, 1, 1, 0, 0, 0, TimeSpan.Zero),
+                "idx/segments/2019".ToDateTimeOffset());
+
+            // Null input propagates to a null result.
+            Assert.AreEqual(
+                null,
+                ((string)null).ToDateTimeOffset());
+        }
+
+        /// <summary>
+        /// RoundDownToNearestHour should zero minutes/seconds and pass null through.
+        /// </summary>
+        [Test]
+        public void RoundDownToNearestHourTests()
+        {
+            Assert.AreEqual(
+                new DateTimeOffset?(
+                    new DateTimeOffset(2020, 03, 17, 20, 0, 0, TimeSpan.Zero)),
+                (new DateTimeOffset?(
+                    new DateTimeOffset(2020, 03, 17, 20, 25, 30, TimeSpan.Zero))).RoundDownToNearestHour());
+
+            Assert.AreEqual(
+                null,
+                ((DateTimeOffset?)null).RoundDownToNearestHour());
+        }
+
+        /// <summary>
+        /// RoundUpToNearestHour should advance to the next whole hour and pass null through.
+        /// </summary>
+        [Test]
+        public void RoundUpToNearestHourTests()
+        {
+            Assert.AreEqual(
+                new DateTimeOffset?(
+                    new DateTimeOffset(2020, 03, 17, 21, 0, 0, TimeSpan.Zero)),
+                (new DateTimeOffset?(
+                    new DateTimeOffset(2020, 03, 17, 20, 25, 30, TimeSpan.Zero))).RoundUpToNearestHour());
+
+            Assert.AreEqual(
+                null,
+                ((DateTimeOffset?)null).RoundUpToNearestHour());
+        }
+
+        /// <summary>
+        /// RoundDownToNearestYear should truncate to January 1st and pass null through.
+        /// </summary>
+        [Test]
+        public void RoundDownToNearestYearTests()
+        {
+            Assert.AreEqual(
+                new DateTimeOffset?(
+                    new DateTimeOffset(2020, 1, 1, 0, 0, 0, TimeSpan.Zero)),
+                (new DateTimeOffset?(
+                    new DateTimeOffset(2020, 03, 17, 20, 25, 30, TimeSpan.Zero))).RoundDownToNearestYear());
+
+            Assert.AreEqual(
+                null,
+                ((DateTimeOffset?)null).RoundDownToNearestYear());
+        }
+
+
+        /// <summary>
+        /// GetSegmentsInYear should keep only the segment paths whose DateTime falls
+        /// within [startTime, endTime], using a mocked container listing.
+        /// </summary>
+        [Test]
+        public async Task GetSegmentsInYearTest()
+        {
+            // Arrange
+            Mock containerClient = new Mock();
+
+            if (IsAsync)
+            {
+                AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYearFuncAsync);
+
+                containerClient.Setup(r => r.GetBlobsByHierarchyAsync(
+                    default,
+                    default,
+                    default,
+                    "idx/segments/2020/",
+                    default)).Returns(asyncPageable);
+            }
+            else
+            {
+                Pageable pageable =
+                    PageResponseEnumerator.CreateEnumerable(GetSegmentsInYearFunc);
+
+                containerClient.Setup(r => r.GetBlobsByHierarchy(
+                    default,
+                    default,
+                    default,
+                    "idx/segments/2020/",
+                    default)).Returns(pageable);
+            }
+
+            // Act
+            Queue segmentPaths = await BlobChangeFeedExtensions.GetSegmentsInYear(
+                IsAsync,
+                containerClient.Object,
+                "idx/segments/2020/",
+                startTime: new DateTimeOffset(2020, 3, 3, 0, 0, 0, TimeSpan.Zero),
+                endTime: new DateTimeOffset(2020, 3, 3, 22, 0, 0, TimeSpan.Zero));
+
+            // Assert - segments before startTime and after endTime are filtered out.
+            Queue expectedSegmentPaths = new Queue();
+            expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/0000/meta.json");
+            expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/1800/meta.json");
+            expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/2000/meta.json");
+            expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/2200/meta.json");
+
+            Assert.AreEqual(expectedSegmentPaths, segmentPaths);
+        }
+    }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs
new file mode 100644
index 0000000000000..76a7eae639687
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs
@@ -0,0 +1,35 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+using NUnit.Framework;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Tests
+{
+    public class BlobChangeFeedPagableTests : ChangeFeedTestBase
+    {
+        public BlobChangeFeedPagableTests(bool async)
+            : base(async, null /* RecordedTestMode.Record /* to re-record */)
+        {
+        }
+
+        /// <summary>
+        /// Smoke test for the synchronous pagable: enumerates every change feed event
+        /// and writes it to the console. Requires a live account with change feed
+        /// enabled, so it is ignored by default.
+        /// </summary>
+        [Test]
+        [Ignore("")]
+        public void Test()
+        {
+            BlobServiceClient service = GetServiceClient_SharedKey();
+            BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient();
+            BlobChangeFeedPagable blobChangeFeedPagable
+                = blobChangeFeedClient.GetChanges();
+            IList list = blobChangeFeedPagable.ToList();
+            foreach (BlobChangeFeedEvent e in list)
+            {
+                Console.WriteLine(e);
+            }
+        }
+    }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs
new file mode 100644
index 0000000000000..99e4e235a9958
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs
@@ -0,0 +1,68 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.Threading.Tasks;
+using Azure.Core;
+using Azure.Storage.Blobs.Models;
+using Moq;
+using NUnit.Framework;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Tests
+{
+    public class ChangeFeedFactoryTests : ChangeFeedTestBase
+    {
+        public ChangeFeedFactoryTests(bool async)
+            : base(async, null /* RecordedTestMode.Record /* to re-record */)
+        {
+        }
+
+        /// <summary>
+        /// GetYearPaths should list year prefixes under the segment prefix via a
+        /// mocked container and return them in order; the "1601" placeholder year
+        /// present in the listing is expected to be excluded from the result.
+        /// </summary>
+        [Test]
+        public async Task GetYearPathsTest()
+        {
+            // Arrange
+            Mock containerClient = new Mock(MockBehavior.Strict);
+
+            if (IsAsync)
+            {
+                AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathFuncAsync);
+
+                containerClient.Setup(r => r.GetBlobsByHierarchyAsync(
+                    default,
+                    default,
+                    "/",
+                    Constants.ChangeFeed.SegmentPrefix,
+                    default)).Returns(asyncPageable);
+            }
+            else
+            {
+                Pageable pageable =
+                    PageResponseEnumerator.CreateEnumerable(GetYearPathFunc);
+
+                containerClient.Setup(r => r.GetBlobsByHierarchy(
+                    default,
+                    default,
+                    "/",
+                    Constants.ChangeFeed.SegmentPrefix,
+                    default)).Returns(pageable);
+            }
+
+            Mock segmentFactory = new Mock();
+            ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(
+                containerClient.Object, segmentFactory.Object);
+
+            // Act
+            Queue years = await changeFeedFactory.GetYearPaths(IsAsync).ConfigureAwait(false);
+
+            // Assert
+            Queue expectedYears = new Queue();
+            expectedYears.Enqueue("idx/segments/2019/");
+            expectedYears.Enqueue("idx/segments/2020/");
+            expectedYears.Enqueue("idx/segments/2022/");
+            expectedYears.Enqueue("idx/segments/2023/");
+            Assert.AreEqual(expectedYears, years);
+        }
+    }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs
new file mode 100644
index 0000000000000..aefdc00ff9894
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs
@@ -0,0 +1,173 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using Azure.Core;
+using Azure.Core.TestFramework;
+using Azure.Storage.Blobs;
+using Azure.Storage.Blobs.Models;
+using Azure.Storage.Test.Shared;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Tests
+{
+    /// <summary>
+    /// Shared base class for change feed tests: provides instrumented clients,
+    /// disposable test containers, and canned blob-listing pages for mocks.
+    /// </summary>
+    public class ChangeFeedTestBase : StorageTestBase
+    {
+
+        public ChangeFeedTestBase(bool async) : this(async, null) { }
+
+        public ChangeFeedTestBase(bool async, RecordedTestMode? mode = null)
+            : base(async, mode)
+        {
+        }
+
+        // Recording.Random keeps generated names deterministic under playback.
+        public string GetNewContainerName() => $"test-container-{Recording.Random.NewGuid()}";
+        public string GetNewBlobName() => $"test-blob-{Recording.Random.NewGuid()}";
+
+        /// <summary>
+        /// Builds an instrumented BlobServiceClient authenticated with the default
+        /// test account's shared key.
+        /// </summary>
+        public BlobServiceClient GetServiceClient_SharedKey()
+            => InstrumentClient(
+                new BlobServiceClient(
+                    new Uri(TestConfigDefault.BlobServiceEndpoint),
+                    new StorageSharedKeyCredential(
+                        TestConfigDefault.AccountName,
+                        TestConfigDefault.AccountKey),
+                    GetOptions()));
+
+        /// <summary>
+        /// Client options wired for the test framework: exponential retry (with
+        /// shortened delays during playback) and the recording transport/policies.
+        /// </summary>
+        public BlobClientOptions GetOptions()
+        {
+            var options = new BlobClientOptions
+            {
+                Diagnostics = { IsLoggingEnabled = true },
+                Retry =
+                {
+                    Mode = RetryMode.Exponential,
+                    MaxRetries = Constants.MaxReliabilityRetries,
+                    Delay = TimeSpan.FromSeconds(Mode == RecordedTestMode.Playback ? 0.01 : 0.5),
+                    MaxDelay = TimeSpan.FromSeconds(Mode == RecordedTestMode.Playback ? 0.1 : 10)
+                },
+                Transport = GetTransport()
+            };
+            if (Mode != RecordedTestMode.Live)
+            {
+                options.AddPolicy(new RecordedClientRequestIdPolicy(Recording), HttpPipelinePosition.PerCall);
+            }
+
+            return Recording.InstrumentClientOptions(options);
+        }
+
+        /// <summary>
+        /// Creates a test container and wraps it in a DisposingContainer so it is
+        /// deleted when the test completes.
+        /// </summary>
+        public async Task GetTestContainerAsync(
+            BlobServiceClient service = default,
+            string containerName = default,
+            IDictionary metadata = default,
+            PublicAccessType? publicAccessType = default,
+            bool premium = default)
+        {
+
+            containerName ??= GetNewContainerName();
+            service ??= GetServiceClient_SharedKey();
+
+            if (publicAccessType == default)
+            {
+                // Premium accounts don't support public container access.
+                publicAccessType = premium ? PublicAccessType.None : PublicAccessType.BlobContainer;
+            }
+
+            BlobContainerClient container = InstrumentClient(service.GetBlobContainerClient(containerName));
+            await container.CreateAsync(metadata: metadata, publicAccessType: publicAccessType.Value);
+            return new DisposingContainer(container);
+        }
+
+        /// <summary>
+        /// Deletes the wrapped container on async dispose, swallowing failures so
+        /// cleanup errors don't mask the real test failure.
+        /// </summary>
+        public class DisposingContainer : IAsyncDisposable
+        {
+            public BlobContainerClient Container;
+
+            public DisposingContainer(BlobContainerClient client)
+            {
+                Container = client;
+            }
+
+            public async ValueTask DisposeAsync()
+            {
+                if (Container != null)
+                {
+                    try
+                    {
+                        await Container.DeleteAsync();
+                        Container = null;
+                    }
+                    catch
+                    {
+                        // swallow the exception to avoid hiding another test failure
+                    }
+                }
+            }
+        }
+
+        // Async wrapper over GetYearPathFunc for AsyncPageable mocks.
+        public static Task> GetYearsPathFuncAsync(string continuation, int? pageSizeHint)
+            => Task.FromResult(GetYearPathFunc(continuation, pageSizeHint));
+
+        /// <summary>
+        /// Canned listing of year prefixes, including the "1601" placeholder year
+        /// that callers are expected to filter out.
+        /// </summary>
+        public static Page GetYearPathFunc(
+            string continuation,
+            int? pageSizeHint)
+            => new BlobHierarchyItemPage(new List
+            {
+                BlobsModelFactory.BlobHierarchyItem("idx/segments/1601/", null),
+                BlobsModelFactory.BlobHierarchyItem("idx/segments/2019/", null),
+                BlobsModelFactory.BlobHierarchyItem("idx/segments/2020/", null),
+                BlobsModelFactory.BlobHierarchyItem("idx/segments/2022/", null),
+                BlobsModelFactory.BlobHierarchyItem("idx/segments/2023/", null),
+            });
+
+        // Async wrapper over GetSegmentsInYearFunc for AsyncPageable mocks.
+        public static Task> GetSegmentsInYearFuncAsync(
+            string continuation,
+            int? pageSizeHint)
+            => Task.FromResult(GetSegmentsInYearFunc(continuation, pageSizeHint));
+
+        /// <summary>
+        /// Canned listing of segment manifests within year 2020, spanning several days.
+        /// </summary>
+        public static Page GetSegmentsInYearFunc(
+            string continuation,
+            int? pageSizeHint)
+            => new BlobHierarchyItemPage(new List
+            {
+                BlobsModelFactory.BlobHierarchyItem(
+                    null,
+                    BlobsModelFactory.BlobItem("idx/segments/2020/01/16/2300/meta.json", false, null)),
+                BlobsModelFactory.BlobHierarchyItem(
+                    null,
+                    BlobsModelFactory.BlobItem("idx/segments/2020/03/02/2300/meta.json", false, null)),
+                BlobsModelFactory.BlobHierarchyItem(
+                    null,
+                    BlobsModelFactory.BlobItem("idx/segments/2020/03/03/0000/meta.json", false, null)),
+                BlobsModelFactory.BlobHierarchyItem(
+                    null,
+                    BlobsModelFactory.BlobItem("idx/segments/2020/03/03/1800/meta.json", false, null)),
+                BlobsModelFactory.BlobHierarchyItem(
+                    null,
+                    BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2000/meta.json", false, null)),
+                BlobsModelFactory.BlobHierarchyItem(
+                    null,
+                    BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2200/meta.json", false, null)),
+                BlobsModelFactory.BlobHierarchyItem(
+                    null,
+                    BlobsModelFactory.BlobItem("idx/segments/2020/03/05/1700/meta.json", false, null)),
+            });
+
+        /// <summary>
+        /// Minimal single-page Page implementation backed by an in-memory list;
+        /// has no continuation and no raw response.
+        /// </summary>
+        public class BlobHierarchyItemPage : Page
+        {
+            private List _items;
+
+            public BlobHierarchyItemPage(List items)
+            {
+                _items = items;
+            }
+
+            public override IReadOnlyList Values => _items;
+
+            public override string ContinuationToken => null;
+
+            public override Response GetRawResponse()
+            {
+                // Tests never inspect the raw response for these canned pages.
+                throw new NotImplementedException();
+            }
+        }
+    }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs
new file mode 100644
index 0000000000000..b23e38165a749
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs
@@ -0,0 +1,1037 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Reflection;
+using System.Text;
+using System.Text.Json;
+using System.Threading.Tasks;
+using Azure.Core;
+using Azure.Core.TestFramework;
+using Azure.Storage.Blobs;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+using Azure.Storage.Blobs.Models;
+using Moq;
+using NUnit.Framework;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Tests
+{
+ public class ChangeFeedTests : ChangeFeedTestBase
+ {
+ public ChangeFeedTests(bool async)
+ : base(async, null /* RecordedTestMode.Record /* to re-record */)
+ {
+ }
+
+ ///
+ /// Tests building a ChangeFeed with a ChangeFeedCursor, and then calling ChangeFeed.GetCursor()
+ /// and making sure the cursors match.
+ ///
+ [Test]
+ public async Task GetCursor()
+ {
+ // Arrange
+ Mock serviceClient = new Mock(MockBehavior.Strict);
+ Mock containerClient = new Mock(MockBehavior.Strict);
+ Mock blobClient = new Mock(MockBehavior.Strict);
+ Mock segmentFactory = new Mock(MockBehavior.Strict);
+ Mock segment = new Mock(MockBehavior.Strict);
+
+ Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed");
+
+ serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny())).Returns(containerClient.Object);
+ containerClient.Setup(r => r.Uri).Returns(containerUri);
+
+ if (IsAsync)
+ {
+ containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200)));
+ }
+ else
+ {
+ containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200)));
+ }
+
+ containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object);
+
+ using FileStream stream = File.OpenRead(
+ $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}");
+ BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream);
+ Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200));
+
+ if (IsAsync)
+ {
+ blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse);
+ }
+ else
+ {
+ blobClient.Setup(r => r.Download()).Returns(downloadResponse);
+ }
+
+ if (IsAsync)
+ {
+ AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathFuncAsync);
+
+ containerClient.Setup(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default)).Returns(asyncPageable);
+ }
+ else
+ {
+ Pageable pageable =
+ PageResponseEnumerator.CreateEnumerable(GetYearPathFunc);
+
+ containerClient.Setup(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default)).Returns(pageable);
+ }
+
+ if (IsAsync)
+ {
+ AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYearFuncAsync);
+
+ containerClient.Setup(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ default,
+ It.IsAny(),
+ default)).Returns(asyncPageable);
+ }
+ else
+ {
+ Pageable pageable =
+ PageResponseEnumerator.CreateEnumerable(GetSegmentsInYearFunc);
+
+ containerClient.Setup(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ default,
+ It.IsAny(),
+ default)).Returns(pageable);
+ }
+
+ segmentFactory.Setup(r => r.BuildSegment(
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny()))
+ .ReturnsAsync(segment.Object);
+
+ long chunkIndex = 1;
+ long blockOffset = 2;
+ long eventIndex = 3;
+ ShardCursor shardCursor = new ShardCursor(
+ chunkIndex,
+ blockOffset,
+ eventIndex);
+
+ DateTimeOffset segmentTime = new DateTimeOffset(2020, 1, 4, 17, 0, 0, TimeSpan.Zero);
+ int shardIndex = 0;
+ SegmentCursor segmentCursor = new SegmentCursor(
+ segmentTime,
+ new List
+ {
+ shardCursor
+ },
+ shardIndex);
+
+ segment.Setup(r => r.GetCursor()).Returns(segmentCursor);
+
+ DateTimeOffset endDateTime = new DateTimeOffset(2020, 5, 6, 18, 0, 0, TimeSpan.Zero);
+ ChangeFeedCursor expectedCursor = new ChangeFeedCursor(
+ urlHash: containerUri.ToString().GetHashCode(),
+ endDateTime: endDateTime,
+ currentSegmentCursor: segmentCursor);
+
+ ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(
+ containerClient.Object,
+ segmentFactory.Object);
+
+ // Act
+ ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed(
+ IsAsync,
+ continuation: JsonSerializer.Serialize(expectedCursor));
+
+ ChangeFeedCursor actualCursor = changeFeed.GetCursor();
+
+ // Assert
+ Assert.AreEqual(expectedCursor.CursorVersion, actualCursor.CursorVersion);
+ Assert.AreEqual(expectedCursor.EndTime, actualCursor.EndTime);
+ Assert.AreEqual(expectedCursor.UrlHash, actualCursor.UrlHash);
+
+ Assert.AreEqual(expectedCursor.CurrentSegmentCursor.SegmentTime, actualCursor.CurrentSegmentCursor.SegmentTime);
+ Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardIndex, actualCursor.CurrentSegmentCursor.ShardIndex);
+ Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors.Count, actualCursor.CurrentSegmentCursor.ShardCursors.Count);
+
+ Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors[0].BlockOffset, actualCursor.CurrentSegmentCursor.ShardCursors[0].BlockOffset);
+ Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors[0].ChunkIndex, actualCursor.CurrentSegmentCursor.ShardCursors[0].ChunkIndex);
+ Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors[0].EventIndex, actualCursor.CurrentSegmentCursor.ShardCursors[0].EventIndex);
+
+ containerClient.Verify(r => r.Uri);
+
+ if (IsAsync)
+ {
+ containerClient.Verify(r => r.ExistsAsync(default));
+ }
+ else
+ {
+ containerClient.Verify(r => r.Exists(default));
+ }
+
+ containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath));
+
+ if (IsAsync)
+ {
+ blobClient.Verify(r => r.DownloadAsync());
+ }
+ else
+ {
+ blobClient.Verify(r => r.Download());
+ }
+
+ if (IsAsync)
+ {
+ containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default));
+ }
+ else
+ {
+ containerClient.Verify(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default));
+ }
+
+ if (IsAsync)
+ {
+ containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ default,
+ "idx/segments/2020/",
+ default));
+ }
+ else
+ {
+ containerClient.Verify(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ default,
+ "idx/segments/2020/",
+ default));
+ }
+
+ segmentFactory.Verify(r => r.BuildSegment(
+ IsAsync,
+ "idx/segments/2020/01/16/2300/meta.json",
+ It.Is(
+ r => r.SegmentTime == segmentTime
+ && r.ShardIndex == shardIndex
+ && r.ShardCursors.Count == 1
+ && r.ShardCursors[0].BlockOffset == blockOffset
+ && r.ShardCursors[0].ChunkIndex == chunkIndex
+ && r.ShardCursors[0].EventIndex == eventIndex
+ )));
+
+ segment.Verify(r => r.GetCursor());
+ }
+
+ ///
+ /// This test has 8 total events, 4 segments, and 2 years.
+ /// We call ChangeFeed.GetPage() with a page size of 3, and then again with no page size,
+ /// resulting in two pages with 3 and 5 Events.
+ ///
+ [Test]
+ public async Task GetPage()
+ {
+ // Arrange
+ int eventCount = 8;
+ int segmentCount = 4;
+ Mock serviceClient = new Mock(MockBehavior.Strict);
+ Mock containerClient = new Mock(MockBehavior.Strict);
+ Mock blobClient = new Mock(MockBehavior.Strict);
+ Mock segmentFactory = new Mock(MockBehavior.Strict);
+ Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed");
+
+ List> segments = new List>();
+ for (int i = 0; i < segmentCount; i++)
+ {
+ segments.Add(new Mock(MockBehavior.Strict));
+ }
+
+ // ChangeFeedFactory.BuildChangeFeed() setups.
+ serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny())).Returns(containerClient.Object);
+ containerClient.SetupSequence(r => r.Uri)
+ .Returns(containerUri)
+ .Returns(containerUri);
+
+ if (IsAsync)
+ {
+ containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200)));
+ }
+ else
+ {
+ containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200)));
+ }
+
+ containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object);
+
+ using FileStream stream = File.OpenRead(
+ $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}");
+ BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream);
+ Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200));
+
+ if (IsAsync)
+ {
+ blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse);
+ }
+ else
+ {
+ blobClient.Setup(r => r.Download()).Returns(downloadResponse);
+ }
+
+ if (IsAsync)
+ {
+ AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathShortFuncAsync);
+
+ containerClient.Setup(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default)).Returns(asyncPageable);
+ }
+ else
+ {
+ Pageable pageable =
+ PageResponseEnumerator.CreateEnumerable(GetYearsPathShortFunc);
+
+ containerClient.Setup(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default)).Returns(pageable);
+ }
+
+ if (IsAsync)
+ {
+ AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2019FuncAsync);
+ AsyncPageable asyncPageable2 = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2020FuncAsync);
+
+ containerClient.SetupSequence(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ default,
+ It.IsAny(),
+ default))
+ .Returns(asyncPageable)
+ .Returns(asyncPageable2);
+ }
+ else
+ {
+ Pageable pageable =
+ PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2019Func);
+
+ Pageable pageable2 =
+ PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2020Func);
+
+ containerClient.SetupSequence(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ default,
+ It.IsAny(),
+ default))
+ .Returns(pageable)
+ .Returns(pageable2);
+ }
+
+ segmentFactory.SetupSequence(r => r.BuildSegment(
+ It.IsAny(),
+ It.IsAny(),
+ default))
+ .Returns(Task.FromResult(segments[0].Object))
+ .Returns(Task.FromResult(segments[1].Object))
+ .Returns(Task.FromResult(segments[2].Object))
+ .Returns(Task.FromResult(segments[3].Object));
+
+ List events = new List();
+ for (int i = 0; i < eventCount; i++)
+ {
+ events.Add(new BlobChangeFeedEvent
+ {
+ Id = Guid.NewGuid()
+ });
+ }
+
+ segments[0].SetupSequence(r => r.HasNext())
+ .Returns(false);
+ segments[1].SetupSequence(r => r.HasNext())
+ .Returns(true)
+ .Returns(false);
+ segments[2].SetupSequence(r => r.HasNext())
+ .Returns(false);
+ segments[3].SetupSequence(r => r.HasNext())
+ .Returns(true)
+ .Returns(false);
+
+ segments[0].SetupSequence(r => r.GetPage(
+ It.IsAny(),
+ It.IsAny(),
+ default))
+ .Returns(Task.FromResult(new List
+ {
+ events[0],
+ events[1]
+ }));
+
+ segments[1].SetupSequence(r => r.GetPage(
+ It.IsAny(),
+ It.IsAny(),
+ default))
+ .Returns(Task.FromResult(new List
+ {
+ events[2]
+ }))
+ .Returns(Task.FromResult(new List
+ {
+ events[3]
+ }));
+
+ segments[2].SetupSequence(r => r.GetPage(
+ It.IsAny(),
+ It.IsAny(),
+ default))
+ .Returns(Task.FromResult(new List
+ {
+ events[4],
+ events[5]
+ }));
+
+ segments[3].SetupSequence(r => r.GetPage(
+ It.IsAny(),
+ It.IsAny(),
+ default))
+ .Returns(Task.FromResult(new List
+ {
+ events[6],
+ events[7]
+ }));
+
+ for (int i = 0; i < segments.Count; i++)
+ {
+ segments[i].Setup(r => r.Finalized)
+ .Returns(true);
+ }
+
+ long chunkIndex = 1;
+ long blockOffset = 2;
+ long eventIndex = 3;
+ ShardCursor shardCursor = new ShardCursor(
+ chunkIndex,
+ blockOffset,
+ eventIndex);
+
+ DateTimeOffset segmentTime = new DateTimeOffset(2020, 1, 4, 17, 0, 0, TimeSpan.Zero);
+ int shardIndex = 0;
+ SegmentCursor segmentCursor = new SegmentCursor(
+ segmentTime,
+ new List
+ {
+ shardCursor
+ },
+ shardIndex);
+ ChangeFeedCursor changeFeedCursor = new ChangeFeedCursor(
+ containerUri.ToString().GetHashCode(),
+ null,
+ segmentCursor);
+
+ containerClient.SetupSequence(r => r.Uri)
+ .Returns(containerUri)
+ .Returns(containerUri);
+
+ segments[1].Setup(r => r.GetCursor()).Returns(segmentCursor);
+ segments[3].Setup(r => r.GetCursor()).Returns(segmentCursor);
+
+ ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(
+ containerClient.Object,
+ segmentFactory.Object);
+ ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed(
+ IsAsync);
+
+ // Act
+ Page page0 = await changeFeed.GetPage(IsAsync, 3);
+ Page page1 = await changeFeed.GetPage(IsAsync);
+
+ // Assert
+ Assert.AreEqual(JsonSerializer.Serialize(changeFeedCursor), page0.ContinuationToken);
+
+ for (int i = 0; i < 3; i++)
+ {
+ Assert.AreEqual(events[i].Id, page0.Values[i].Id);
+ }
+
+ Assert.AreEqual(JsonSerializer.Serialize(changeFeedCursor), page1.ContinuationToken);
+
+ for (int i = 3; i < events.Count; i++)
+ {
+ Assert.AreEqual(events[i].Id, page1.Values[i - 3].Id);
+ }
+
+ // ChangeFeedFactory.BuildChangeFeed() verifies
+ containerClient.Verify(r => r.Uri);
+
+ if (IsAsync)
+ {
+ containerClient.Verify(r => r.ExistsAsync(default));
+ }
+ else
+ {
+ containerClient.Verify(r => r.Exists(default));
+ }
+
+ containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath));
+
+ if (IsAsync)
+ {
+ blobClient.Verify(r => r.DownloadAsync());
+ }
+ else
+ {
+ blobClient.Verify(r => r.Download());
+ }
+
+ if (IsAsync)
+ {
+ containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default));
+ }
+ else
+ {
+ containerClient.Verify(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default));
+ }
+
+ if (IsAsync)
+ {
+ containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ default,
+ "idx/segments/2019/",
+ default));
+
+ containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ default,
+ "idx/segments/2020/",
+ default));
+ }
+ else
+ {
+ containerClient.Verify(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ default,
+ "idx/segments/2019/",
+ default));
+
+ containerClient.Verify(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ default,
+ "idx/segments/2020/",
+ default));
+ }
+
+ // ChangeFeed.Next() verifies.
+ segments[0].Verify(r => r.HasNext());
+ segments[1].Verify(r => r.HasNext(), Times.Exactly(2));
+ segments[2].Verify(r => r.HasNext());
+ segments[3].Verify(r => r.HasNext(), Times.Exactly(3));
+
+ segments[0].Verify(r => r.GetPage(
+ IsAsync,
+ 3,
+ default));
+
+ segments[1].Verify(r => r.GetPage(
+ IsAsync,
+ 1,
+ default));
+
+ segments[1].Verify(r => r.GetPage(
+ IsAsync,
+ Constants.ChangeFeed.DefaultPageSize,
+ default));
+
+ segments[2].Verify(r => r.GetPage(
+ IsAsync,
+ Constants.ChangeFeed.DefaultPageSize - 1,
+ default));
+
+ segments[3].Verify(r => r.GetPage(
+ IsAsync,
+ Constants.ChangeFeed.DefaultPageSize - 3,
+ default));
+
+ segments[1].Verify(r => r.GetCursor());
+ segments[3].Verify(r => r.GetCursor());
+
+ segments[0].Verify(r => r.Finalized, Times.Exactly(3));
+ segments[1].Verify(r => r.Finalized, Times.Exactly(4));
+ segments[2].Verify(r => r.Finalized, Times.Exactly(1));
+ segments[3].Verify(r => r.Finalized, Times.Exactly(2));
+
+ containerClient.Verify(r => r.Uri, Times.Exactly(2));
+ }
+
+ [Test]
+ public async Task NoYearsAfterStartTime()
+ {
+ // Arrange
+ Mock serviceClient = new Mock(MockBehavior.Strict);
+ Mock containerClient = new Mock(MockBehavior.Strict);
+ Mock blobClient = new Mock(MockBehavior.Strict);
+ Mock segmentFactory = new Mock(MockBehavior.Strict);
+ Mock segment = new Mock(MockBehavior.Strict);
+
+ Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed");
+
+ serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny())).Returns(containerClient.Object);
+
+ if (IsAsync)
+ {
+ containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200)));
+ }
+ else
+ {
+ containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200)));
+ }
+
+ containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object);
+
+ using FileStream stream = File.OpenRead(
+ $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}");
+ BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream);
+ Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200));
+
+ if (IsAsync)
+ {
+ blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse);
+ }
+ else
+ {
+ blobClient.Setup(r => r.Download()).Returns(downloadResponse);
+ }
+
+ if (IsAsync)
+ {
+ AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathFuncAsync);
+
+ containerClient.Setup(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default)).Returns(asyncPageable);
+ }
+ else
+ {
+ Pageable pageable =
+ PageResponseEnumerator.CreateEnumerable(GetYearPathFunc);
+
+ containerClient.Setup(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default)).Returns(pageable);
+ }
+
+ ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(
+ containerClient.Object,
+ segmentFactory.Object);
+ ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed(
+ IsAsync,
+ startTime: new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero));
+
+ // Act
+ bool hasNext = changeFeed.HasNext();
+
+ // Assert
+ Assert.IsFalse(hasNext);
+
+ if (IsAsync)
+ {
+ containerClient.Verify(r => r.ExistsAsync(default));
+ }
+ else
+ {
+ containerClient.Verify(r => r.Exists(default));
+ }
+
+ containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath));
+
+ if (IsAsync)
+ {
+ blobClient.Verify(r => r.DownloadAsync());
+ }
+ else
+ {
+ blobClient.Verify(r => r.Download());
+ }
+
+ if (IsAsync)
+ {
+ containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default));
+ }
+ else
+ {
+ containerClient.Verify(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default));
+ }
+ }
+
+ [Test]
+ public async Task NoSegmentsRemainingInStartYear()
+ {
+ // Arrange
+ int eventCount = 2;
+ int segmentCount = 2;
+ Mock serviceClient = new Mock(MockBehavior.Strict);
+ Mock containerClient = new Mock(MockBehavior.Strict);
+ Mock blobClient = new Mock(MockBehavior.Strict);
+ Mock segmentFactory = new Mock(MockBehavior.Strict);
+ Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed");
+
+ List> segments = new List>();
+ for (int i = 0; i < segmentCount; i++)
+ {
+ segments.Add(new Mock(MockBehavior.Strict));
+ }
+
+ // ChangeFeedFactory.BuildChangeFeed() setups.
+ serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny())).Returns(containerClient.Object);
+ containerClient.SetupSequence(r => r.Uri)
+ .Returns(containerUri)
+ .Returns(containerUri);
+
+ if (IsAsync)
+ {
+ containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200)));
+ }
+ else
+ {
+ containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200)));
+ }
+
+ containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object);
+
+ using FileStream stream = File.OpenRead(
+ $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}");
+ BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream);
+ Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200));
+
+ if (IsAsync)
+ {
+ blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse);
+ }
+ else
+ {
+ blobClient.Setup(r => r.Download()).Returns(downloadResponse);
+ }
+
+ if (IsAsync)
+ {
+ AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathShortFuncAsync);
+
+ containerClient.Setup(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default)).Returns(asyncPageable);
+ }
+ else
+ {
+ Pageable pageable =
+ PageResponseEnumerator.CreateEnumerable(GetYearsPathShortFunc);
+
+ containerClient.Setup(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default)).Returns(pageable);
+ }
+
+ if (IsAsync)
+ {
+ AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2019FuncAsync);
+ AsyncPageable asyncPageable2 = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2020FuncAsync);
+
+ containerClient.SetupSequence(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ default,
+ It.IsAny(),
+ default))
+ .Returns(asyncPageable)
+ .Returns(asyncPageable2);
+ }
+ else
+ {
+ Pageable pageable =
+ PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2019Func);
+
+ Pageable pageable2 =
+ PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2020Func);
+
+ containerClient.SetupSequence(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ default,
+ It.IsAny(),
+ default))
+ .Returns(pageable)
+ .Returns(pageable2);
+ }
+
+ segmentFactory.SetupSequence(r => r.BuildSegment(
+ It.IsAny(),
+ It.IsAny(),
+ default))
+ .Returns(Task.FromResult(segments[0].Object))
+ .Returns(Task.FromResult(segments[1].Object));
+
+ List events = new List();
+ for (int i = 0; i < eventCount; i++)
+ {
+ events.Add(new BlobChangeFeedEvent
+ {
+ Id = Guid.NewGuid()
+ });
+ }
+
+ segments[0].SetupSequence(r => r.GetPage(
+ It.IsAny(),
+ It.IsAny(),
+ default))
+ .Returns(Task.FromResult(new List
+ {
+ events[0]
+ }));
+
+ segments[1].SetupSequence(r => r.GetPage(
+ It.IsAny(),
+ It.IsAny(),
+ default))
+ .Returns(Task.FromResult(new List
+ {
+ events[1]
+ }));
+
+ segments[0].SetupSequence(r => r.HasNext())
+ .Returns(false);
+ segments[1].SetupSequence(r => r.HasNext())
+ .Returns(true)
+ .Returns(false);
+
+ segments[1].Setup(r => r.GetCursor())
+ .Returns(new SegmentCursor());
+
+ for (int i = 0; i < segments.Count; i++)
+ {
+ segments[i].Setup(r => r.Finalized)
+ .Returns(true);
+ }
+
+ ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(
+ containerClient.Object,
+ segmentFactory.Object);
+ ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed(
+ IsAsync,
+ startTime: new DateTimeOffset(2019, 6, 1, 0, 0, 0, TimeSpan.Zero));
+
+ // Act
+ Page page = await changeFeed.GetPage(IsAsync);
+
+ // Assert
+ Assert.AreEqual(2, page.Values.Count);
+ Assert.AreEqual(events[0].Id, page.Values[0].Id);
+ Assert.AreEqual(events[1].Id, page.Values[1].Id);
+
+ containerClient.Verify(r => r.Uri);
+
+ if (IsAsync)
+ {
+ containerClient.Verify(r => r.ExistsAsync(default));
+ }
+ else
+ {
+ containerClient.Verify(r => r.Exists(default));
+ }
+
+ containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath));
+
+ if (IsAsync)
+ {
+ blobClient.Verify(r => r.DownloadAsync());
+ }
+ else
+ {
+ blobClient.Verify(r => r.Download());
+ }
+
+ if (IsAsync)
+ {
+ containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default));
+ }
+ else
+ {
+ containerClient.Verify(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ "/",
+ Constants.ChangeFeed.SegmentPrefix,
+ default));
+ }
+
+ if (IsAsync)
+ {
+ containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ default,
+ "idx/segments/2019/",
+ default));
+
+ containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
+ default,
+ default,
+ default,
+ "idx/segments/2020/",
+ default));
+ }
+ else
+ {
+ containerClient.Verify(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ default,
+ "idx/segments/2019/",
+ default));
+
+ containerClient.Verify(r => r.GetBlobsByHierarchy(
+ default,
+ default,
+ default,
+ "idx/segments/2020/",
+ default));
+ }
+
+ // ChangeFeeed.Next() verifies.
+ segments[0].Verify(r => r.HasNext(), Times.Exactly(1));
+
+ segments[0].Verify(r => r.GetPage(
+ IsAsync,
+ Constants.ChangeFeed.DefaultPageSize,
+ default));
+
+ segments[1].Verify(r => r.HasNext(), Times.Exactly(3));
+
+ segments[1].Verify(r => r.GetPage(
+ IsAsync,
+ Constants.ChangeFeed.DefaultPageSize - 1,
+ default));
+
+ containerClient.Verify(r => r.Uri, Times.Exactly(1));
+
+ }
+
+ public static Task> GetYearsPathShortFuncAsync(string continuation, int? pageSizeHint)
+ => Task.FromResult(GetYearsPathShortFunc(continuation, pageSizeHint));
+
+ public static Page GetYearsPathShortFunc(
+ string continuation,
+ int? pageSizeHint)
+ => new BlobHierarchyItemPage(new List
+ {
+ BlobsModelFactory.BlobHierarchyItem("idx/segments/1601/", null),
+ BlobsModelFactory.BlobHierarchyItem("idx/segments/2019/", null),
+ BlobsModelFactory.BlobHierarchyItem("idx/segments/2020/", null)
+ });
+
+ public static Task> GetSegmentsInYear2019FuncAsync(
+ string continuation,
+ int? pageSizeHint)
+ => Task.FromResult(GetSegmentsInYear2019Func(continuation, pageSizeHint));
+
+ public static Page GetSegmentsInYear2019Func(
+ string continuation,
+ int? pageSizeHint)
+ => new BlobHierarchyItemPage(new List
+ {
+ BlobsModelFactory.BlobHierarchyItem(
+ null,
+ BlobsModelFactory.BlobItem("idx/segments/2019/03/02/2000/meta.json", false, null)),
+ BlobsModelFactory.BlobHierarchyItem(
+ null,
+ BlobsModelFactory.BlobItem("idx/segments/2019/04/03/2200/meta.json", false, null))
+ });
+
+ public static Task> GetSegmentsInYear2020FuncAsync(
+ string continuation,
+ int? pageSizeHint)
+ => Task.FromResult(GetSegmentsInYear2020Func(continuation, pageSizeHint));
+
+ public static Page GetSegmentsInYear2020Func(
+ string continuation,
+ int? pageSizeHint)
+ => new BlobHierarchyItemPage(new List
+ {
+ BlobsModelFactory.BlobHierarchyItem(
+ null,
+ BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2000/meta.json", false, null)),
+ BlobsModelFactory.BlobHierarchyItem(
+ null,
+ BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2200/meta.json", false, null))
+ });
+ }
+}
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs
new file mode 100644
index 0000000000000..71cff0933289f
--- /dev/null
+++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs
@@ -0,0 +1,266 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.Storage.Blobs.ChangeFeed.Models;
+using Azure.Storage.Blobs.Models;
+using Azure.Storage.Internal.Avro;
+using Moq;
+using NUnit.Framework;
+
+namespace Azure.Storage.Blobs.ChangeFeed.Tests
+{
+ public class ChunkTests : ChangeFeedTestBase
+ {
+ public ChunkTests(bool async)
+ : base(async, null /* RecordedTestMode.Record /* to re-record */)
+ {
+ }
+
+ /// <summary>
+ /// Tests Chunk.HasNext() when the underlying AvroReader.HasNext() returns true.
+ /// </summary>
+ [Test]
+ public void HasNext_True()
+ {
+ // Arrange
+ string chunkPath = "chunkPath";
+ Mock containerClient = new Mock(MockBehavior.Strict);
+ Mock blobClient = new Mock(MockBehavior.Strict);
+ Mock avroReaderFactory = new Mock(MockBehavior.Strict);
+ Mock avroReader = new Mock(MockBehavior.Strict);
+ Mock lazyLoadingBlobStreamFactory = new Mock(MockBehavior.Strict);
+ Mock lazyLoadingBlobStream = new Mock(MockBehavior.Strict);
+
+ containerClient.Setup(r => r.GetBlobClient(It.IsAny