From 34ef0a60b6f3fbdd72e3ecfecd9f1d6de5a6f33f Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Wed, 29 Apr 2020 19:01:16 -0700 Subject: [PATCH 01/30] Added Change Feed --- eng/Packages.Data.props | 1 + .../BreakingChanges.txt | 6 + .../CHANGELOG.md | 4 + .../Azure.Storage.Blobs.ChangeFeed/README.md | 20 + ...rage.Blobs.ChangeFeed.Samples.Tests.csproj | 31 ++ .../samples/README.md | 15 + .../samples/Sample01a_HelloWorld.cs | 27 ++ .../samples/Sample01b_HelloWorldAsync.cs | 29 ++ .../samples/Sample02_Auth.cs | 26 ++ .../src/AssemblyInfo.cs | 13 + .../src/Azure.Storage.Blobs.ChangeFeed.csproj | 48 +++ .../src/BlobChangeFeedAsyncPagable.cs | 66 +++ .../src/BlobChangeFeedClient.cs | 120 ++++++ .../src/BlobChangeFeedExtensions.cs | 104 +++++ .../src/BlobChangeFeedPagable.cs | 57 +++ .../src/ChangeFeed.cs | 385 ++++++++++++++++++ .../src/Chunk.cs | 111 +++++ .../src/LazyLoadingBlobStream.cs | 242 +++++++++++ .../src/Models/BlobChangeFeedCursor.cs | 48 +++ .../src/Models/BlobChangeFeedEvent.cs | 93 +++++ .../src/Models/BlobChangeFeedEventData.cs | 129 ++++++ .../src/Models/BlobChangeFeedEventPage.cs | 37 ++ .../src/Models/BlobChangeFeedEventType.cs | 21 + .../src/Models/BlobChangeFeedModelFactory.cs | 75 ++++ .../src/Models/BlobChangeFeedSegmentCursor.cs | 42 ++ .../src/Models/BlobChangeFeedShardCursor.cs | 44 ++ .../src/Segment.cs | 178 ++++++++ .../src/Shard.cs | 160 ++++++++ ...zure.Storage.Blobs.ChangeFeed.Tests.csproj | 20 + .../tests/BlobChangeFeedAsyncPagableTests.cs | 92 +++++ .../tests/BlobChangeFeedExtensionsTests.cs | 99 +++++ .../tests/BlobChangeFeedPagableTests.cs | 35 ++ .../tests/ChangeFeedTestBase.cs | 106 +++++ .../tests/ChangeFeedTests.cs | 182 +++++++++ .../tests/LazyLoadingBlobStreamTests.cs | 92 +++++ .../src/Shared/Constants.cs | 13 + sdk/storage/Azure.Storage.sln | 18 + 37 files changed, 2789 insertions(+) create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/BreakingChanges.txt create mode 100644 
sdk/storage/Azure.Storage.Blobs.ChangeFeed/CHANGELOG.md create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Azure.Storage.Blobs.ChangeFeed.Samples.Tests.csproj create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/README.md create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample02_Auth.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AssemblyInfo.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Azure.Storage.Blobs.ChangeFeed.csproj create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedClient.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedCursor.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventType.cs create mode 100644 
sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedSegmentCursor.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedShardCursor.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Azure.Storage.Blobs.ChangeFeed.Tests.csproj create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/LazyLoadingBlobStreamTests.cs diff --git a/eng/Packages.Data.props b/eng/Packages.Data.props index b63ce26fded2c..b286914770d0c 100755 --- a/eng/Packages.Data.props +++ b/eng/Packages.Data.props @@ -23,6 +23,7 @@ + diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/BreakingChanges.txt b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/BreakingChanges.txt new file mode 100644 index 0000000000000..6717353356a1c --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/BreakingChanges.txt @@ -0,0 +1,6 @@ +Breaking Changes +================ + +12.0.0-preview.1 +-------------------------- +- New Azure.Storage.Blobs.ChangeFeed client library. 
\ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/CHANGELOG.md b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/CHANGELOG.md new file mode 100644 index 0000000000000..de3267ebd7456 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/CHANGELOG.md @@ -0,0 +1,4 @@ +# Release History + +## 12.0.0-preview.1 +This preview is the first release supporting Azure Storage Blobs Change Feed. \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md new file mode 100644 index 0000000000000..293260514c432 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md @@ -0,0 +1,20 @@ +# Azure Storage Blobs Change Feed client library for .NET + +> Server Version: 2019-07-07 +## Getting started +- TODO + +## Key concepts +- TODO + +## Examples +- TODO + +## Troubleshooting +- TODO + +## Next steps +- TODO + +## Contributing +- TODO \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Azure.Storage.Blobs.ChangeFeed.Samples.Tests.csproj b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Azure.Storage.Blobs.ChangeFeed.Samples.Tests.csproj new file mode 100644 index 0000000000000..2f30c91b605f9 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Azure.Storage.Blobs.ChangeFeed.Samples.Tests.csproj @@ -0,0 +1,31 @@ + + + $(RequiredTargetFrameworks) + Microsoft Azure.Storage.Blobs.ChangeFeed client library samples + false + + + + + + + + + + PreserveNewest + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/README.md b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/README.md new file mode 100644 index 0000000000000..e4847a5497efa --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/README.md @@ -0,0 +1,15 @@ +--- +page_type: sample +languages: +- csharp +products: +- azure +- azure-storage +name: 
Azure.Storage.ChangeFeed samples for .NET +description: Samples for the Azure.Storage.Blobs.ChangeFeed client library +--- + +# Azure.Storage.ChangeFeed Samples + +- sample 0 +- sample 1 \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs new file mode 100644 index 0000000000000..3968fd12b0a91 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Reflection.Metadata.Ecma335; +using NUnit.Framework; + +namespace Azure.Storage.Blobs.ChangeFeed.Samples +{ + /// + /// Basic Azure ChangeFeed Storage samples. + /// + public class Sample01a_HelloWorld : SampleTest + { + /// + /// Sample sample. + /// + [Test] + public void SampleSample() + { + Assert.AreEqual(1, 1); + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs new file mode 100644 index 0000000000000..aaea9a654464c --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using Azure.Storage; +using NUnit.Framework; + +namespace Azure.Storage.Blobs.ChangeFeed.Samples +{ + /// + /// Basic Azure ChangeFeed Storage samples. + /// + public class Sample01b_HelloWorldAsync : SampleTest + { + /// + /// Sample sample. 
+ /// + [Test] + public async Task SampleSample() + { + await Task.CompletedTask; + Assert.AreEqual(1, 1); + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample02_Auth.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample02_Auth.cs new file mode 100644 index 0000000000000..aa7410280d819 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample02_Auth.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.IO; +using System.Net; +using System.Text; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using Azure.Storage; +using Azure.Storage.Sas; +using NUnit.Framework; + +namespace Azure.Storage.Blobs.ChangeFeed.Samples +{ + public class Sample02_Auth : SampleTest + { + [Test] + public async Task SampleSample() + { + await Task.CompletedTask; + Assert.IsTrue(true); + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AssemblyInfo.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AssemblyInfo.cs new file mode 100644 index 0000000000000..0c57a2e78eba5 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AssemblyInfo.cs @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("Azure.Storage.Blobs.ChangeFeed.Tests, PublicKey=" + + "0024000004800000940000000602000000240000525341310004000001000100d15ddcb2968829" + + "5338af4b7686603fe614abd555e09efba8fb88ee09e1f7b1ccaeed2e8f823fa9eef3fdd60217fc" + + "012ea67d2479751a0b8c087a4185541b851bd8b16f8d91b840e51b1cb0ba6fe647997e57429265" + + "e85ef62d565db50a69ae1647d54d7bd855e4db3d8a91510e5bcbd0edfbbecaa20a7bd9ae74593d" + + "aa7b11b4")] +[assembly: InternalsVisibleTo("DynamicProxyGenAssembly2, PublicKey=0024000004800000940000000602000000240000525341310004000001000100c547cac37abd99c8db225ef2f6c8a3602f3b3606cc9891605d02baa56104f4cfc0734aa39b93bf7852f7d9266654753cc297e7d2edfe0bac1cdcf9f717241550e0a7b191195b7667bb4f64bcb8e2121380fd1d9d46ad2d92d2d15605093924cceaf74c4861eff62abf69b9291ed0a340e113be11e6a7d3113e92484cf7045cc7")] +[assembly: Azure.Core.AzureResourceProviderNamespace("Microsoft.Storage")] diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Azure.Storage.Blobs.ChangeFeed.csproj b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Azure.Storage.Blobs.ChangeFeed.csproj new file mode 100644 index 0000000000000..d1d9b80dcdce9 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Azure.Storage.Blobs.ChangeFeed.csproj @@ -0,0 +1,48 @@ + + + $(RequiredTargetFrameworks) + + + Microsoft Azure.Storage.Blobs.ChangeFeed client library + 12.0.0-preview.1 + ChangeFeedSDK;$(DefineConstants) + Microsoft Azure Change Feed;Microsoft;Azure;Storage;StorageScalable;$(PackageCommonTags) + + This client library enables working with the Microsoft Azure Storage Change Feed feature to review and monitor changes to an Azure Storage account. 
+ For this release see notes - https://github.com/Azure/azure-sdk-for-net/blob/master/sdk/storage/Azure.Storage.ChangeFeed/README.md and https://github.com/Azure/azure-sdk-for-net/blob/master/sdk/storage/Azure.Storage.ChangeFeed/CHANGELOG.md + in addition to the breaking changes https://github.com/Azure/azure-sdk-for-net/blob/master/sdk/storage/Azure.Storage.ChangeFeed/BreakingChanges.txt + Microsoft Azure Storage quickstarts and tutorials - TODO + Microsoft Azure Storage REST API Reference - TODO + + false + false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs new file mode 100644 index 0000000000000..263a3a93b35bf --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure.Storage.Blobs; +using Azure.Storage.Blobs.Models; +using Azure.Storage.Blobs.ChangeFeed.Models; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + /// + /// BlobChangeFeedPagableAsync. + /// + public class BlobChangeFeedAsyncPagable : AsyncPageable + { + private ChangeFeed _changeFeed; + + /// + /// Internal constructor. + /// + internal BlobChangeFeedAsyncPagable( + BlobServiceClient blobBerviceClient, + DateTimeOffset? startTime = default, + DateTimeOffset? 
endTime = default) + { + _changeFeed = new ChangeFeed( + blobBerviceClient, + startTime, + endTime); + } + + internal BlobChangeFeedAsyncPagable( + BlobServiceClient blobServiceClient, + string continuation) + { + _changeFeed = new ChangeFeed( + blobServiceClient, + continuation); + } + + /// + /// AsPages. + /// + /// + /// + /// + public override async IAsyncEnumerable> AsPages( + string continuationToken = null, + int? pageSizeHint = null) + { + while (_changeFeed.HasNext()) + { + yield return await _changeFeed.GetPage( + async: true, + pageSize: pageSizeHint ?? 512).ConfigureAwait(false); + } + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedClient.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedClient.cs new file mode 100644 index 0000000000000..43f2ccbcd3569 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedClient.cs @@ -0,0 +1,120 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; +using Azure.Storage.Blobs; +using Azure.Storage.Blobs.ChangeFeed.Models; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + /// + /// BlobChangeFeedClient. + /// + public class BlobChangeFeedClient + { + private BlobServiceClient _blobServiceClient; + + /// + /// Constructor. + /// + protected BlobChangeFeedClient() { } + + internal BlobChangeFeedClient(BlobServiceClient blobServiceClient) + { + _blobServiceClient = blobServiceClient; + } + + /// + /// GetChanges. + /// + /// . +#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. 
+ public virtual BlobChangeFeedPagable GetChanges() +#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + { + BlobChangeFeedPagable pageable = new BlobChangeFeedPagable( + _blobServiceClient); + return pageable; + } + + /// + /// GetChanges. + /// + /// + /// . +#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + public virtual BlobChangeFeedPagable GetChanges(string continuation) +#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + { + BlobChangeFeedPagable pageable = new BlobChangeFeedPagable( + _blobServiceClient, + continuation); + return pageable; + } + + /// + /// GetChanges. + /// + /// + /// + /// . +#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + public virtual BlobChangeFeedPagable GetChanges(DateTimeOffset start = default, DateTimeOffset end = default) +#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + { + BlobChangeFeedPagable pageable = new BlobChangeFeedPagable( + _blobServiceClient, + start, + end); + return pageable; + } + + /// + /// GetChangesAsync. + /// + /// . +#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. 
+ public virtual BlobChangeFeedAsyncPagable GetChangesAsync() +#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + { + BlobChangeFeedAsyncPagable asyncPagable = new BlobChangeFeedAsyncPagable(_blobServiceClient); + return asyncPagable; + } + + /// + /// GetChangesAsync. + /// + /// + /// . +#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + public virtual BlobChangeFeedAsyncPagable GetChangesAsync(string continuation) +#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + { + BlobChangeFeedAsyncPagable asyncPagable = new BlobChangeFeedAsyncPagable(_blobServiceClient, + continuation); + return asyncPagable; + } + + /// + /// GetChangesAsync. + /// + /// + /// + /// . +#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. + public virtual BlobChangeFeedAsyncPagable GetChangesAsync( + DateTimeOffset start = default, + DateTimeOffset end = default) +#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken. 
+ { + BlobChangeFeedAsyncPagable asyncPagable = new BlobChangeFeedAsyncPagable( + _blobServiceClient, + start, + end); + return asyncPagable; + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs new file mode 100644 index 0000000000000..cd711c350c154 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs @@ -0,0 +1,104 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Globalization; +using Azure.Storage.Blobs; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + /// + /// BlobChangeFeedExtensions. + /// + public static class BlobChangeFeedExtensions + { + /// + /// GetChangeFeedClient. + /// + /// + /// . + public static BlobChangeFeedClient GetChangeFeedClient(this BlobServiceClient serviceClient) + { + return new BlobChangeFeedClient(serviceClient); + } + + /// + /// Builds a DateTimeOffset from a segment path. + /// + internal static DateTimeOffset? ToDateTimeOffset(this string segmentPath) + { + if (segmentPath == null) + { + return default; + } + segmentPath = segmentPath.Trim('/'); + string[] splitPath = segmentPath.Split('/'); + return new DateTimeOffset( + year: int.Parse(splitPath[2], CultureInfo.InvariantCulture), + month: splitPath.Length >= 4 + ? int.Parse(splitPath[3], CultureInfo.InvariantCulture) + : 1, + day: splitPath.Length >= 5 + ? int.Parse(splitPath[4], CultureInfo.InvariantCulture) + : 1, + hour: splitPath.Length >= 6 + ? int.Parse(splitPath[5], CultureInfo.InvariantCulture) / 100 + : 0, + minute: 0, + second: 0, + offset: TimeSpan.Zero); + } + + /// + /// Rounds a DateTimeOffset down to the nearest hour. + /// + internal static DateTimeOffset? RoundDownToNearestHour(this DateTimeOffset? 
dateTimeOffset) + { + if (dateTimeOffset == null) + { + return null; + } + + return new DateTimeOffset( + year: dateTimeOffset.Value.Year, + month: dateTimeOffset.Value.Month, + day: dateTimeOffset.Value.Day, + hour: dateTimeOffset.Value.Hour, + minute: 0, + second: 0, + offset: dateTimeOffset.Value.Offset); + } + + /// + /// Rounds a DateTimeOffset up to the nearest hour. + /// + internal static DateTimeOffset? RoundUpToNearestHour(this DateTimeOffset? dateTimeOffset) + { + if (dateTimeOffset == null) + { + return null; + } + + DateTimeOffset? newDateTimeOffest = dateTimeOffset.RoundDownToNearestHour(); + + return newDateTimeOffest.Value.AddHours(1); + } + + internal static DateTimeOffset? RoundDownToNearestYear(this DateTimeOffset? dateTimeOffset) + { + if (dateTimeOffset == null) + { + return null; + } + + return new DateTimeOffset( + year: dateTimeOffset.Value.Year, + month: 1, + day: 1, + hour: 0, + minute: 0, + second: 0, + offset: TimeSpan.Zero); + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs new file mode 100644 index 0000000000000..8201d5f1128b3 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading; +using Azure.Core.Pipeline; +using Azure.Storage.Blobs; +using Azure.Storage.Blobs.ChangeFeed.Models; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + /// + /// BlobChangeFeedPagable. + /// + public class BlobChangeFeedPagable : Pageable + { + private ChangeFeed _changeFeed; + + internal BlobChangeFeedPagable( + BlobServiceClient serviceClient, + DateTimeOffset? startTime = default, + DateTimeOffset? 
endTime = default) + { + _changeFeed = new ChangeFeed( + serviceClient, + startTime, + endTime); + } + + internal BlobChangeFeedPagable( + BlobServiceClient serviceClient, + string continuation) + { + _changeFeed = new ChangeFeed( + serviceClient, + continuation); + } + + /// + /// AsPages. + /// + /// + /// + /// + public override IEnumerable> AsPages(string continuationToken = null, int? pageSizeHint = null) + { + while (_changeFeed.HasNext()) + { + yield return _changeFeed.GetPage( + async: false, + pageSize: pageSizeHint ?? 512).EnsureCompleted(); + } + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs new file mode 100644 index 0000000000000..273dad09de74f --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs @@ -0,0 +1,385 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Core.Pipeline; +using Azure.Storage.Blobs; +using Azure.Storage.Blobs.Models; +using Azure.Storage.Blobs.ChangeFeed.Models; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + internal class ChangeFeed + { + /// + /// BlobContainerClient for making List Blob requests and creating Segments. + /// + private readonly BlobContainerClient _containerClient; + + /// + /// Queue of paths to years we haven't processed yet. + /// + private Queue _years; + + /// + /// Paths to segments in the current year we haven't processed yet. + /// + private Queue _segments; + + /// + /// The Segment we are currently processing. + /// + private Segment _currentSegment; + + private readonly BlobChangeFeedSegmentCursor _currentSegmentCursor; + + /// + /// The latest time the Change Feed can safely be read from. 
+ /// + //TODO this can advance while we are iterating through the Change Feed. Figure out how to support this. + private DateTimeOffset _lastConsumable; + + /// + /// User-specified start time. If the start time occurs before Change Feed was enabled + /// for this account, we will start at the beginning of the Change Feed. + /// + private DateTimeOffset? _startTime; + + /// + /// User-specified end time. If the end time occurs after _lastConsumable, we will + /// end at _lastConsumable. + /// + private DateTimeOffset? _endTime; + + /// + /// If this ChangeFeed has been initalized. + /// + private bool _isInitalized; + + // Start time will be rounded down to the nearest hour. + public ChangeFeed( + BlobServiceClient blobServiceClient, + DateTimeOffset? startTime = default, + DateTimeOffset? endTime = default) + { + _containerClient = blobServiceClient.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName); + _years = new Queue(); + _segments = new Queue(); + _isInitalized = false; + _startTime = startTime.RoundDownToNearestHour(); + _endTime = endTime.RoundUpToNearestHour(); + } + + public ChangeFeed( + BlobServiceClient blobServiceClient, + string continutation) + { + _containerClient = blobServiceClient.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName); + BlobChangeFeedCursor cursor = JsonSerializer.Deserialize(continutation); + ValidateCursor(_containerClient, cursor); + _years = new Queue(); + _segments = new Queue(); + _isInitalized = false; + _startTime = cursor.CurrentSegmentCursor.SegmentTime; + _endTime = cursor.EndTime; + _currentSegmentCursor = cursor.CurrentSegmentCursor; + } + + /// + /// Internal constructor for unit tests. + /// + /// + internal ChangeFeed( + BlobContainerClient containerClient) + { + _containerClient = containerClient; + } + + private async Task Initalize(bool async) + { + // Check if Change Feed has been abled for this account. 
+ bool changeFeedContainerExists; + + if (async) + { + changeFeedContainerExists = await _containerClient.ExistsAsync().ConfigureAwait(false); + } + else + { + changeFeedContainerExists = _containerClient.Exists(); + } + + if (!changeFeedContainerExists) + { + //TODO improve this error message + throw new ArgumentException("Change Feed hasn't been enabled on this account, or is current being enabled."); + } + + // Get last consumable + BlobClient blobClient = _containerClient.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath); + BlobDownloadInfo blobDownloadInfo; + if (async) + { + blobDownloadInfo = await blobClient.DownloadAsync().ConfigureAwait(false); + } + else + { + blobDownloadInfo = blobClient.Download(); + } + + JsonDocument jsonMetaSegment; + if (async) + { + jsonMetaSegment = await JsonDocument.ParseAsync(blobDownloadInfo.Content).ConfigureAwait(false); + } + else + { + jsonMetaSegment = JsonDocument.Parse(blobDownloadInfo.Content); + } + + //TODO what happens when _lastConsumable advances an hour? + _lastConsumable = jsonMetaSegment.RootElement.GetProperty("lastConsumable").GetDateTimeOffset(); + + // Get year paths + _years = await GetYearPaths(async).ConfigureAwait(false); + + // Dequeue any years that occur before start time + if (_startTime.HasValue) + { + while (_years.Count > 0 + && _years.Peek().ToDateTimeOffset() < _startTime.RoundDownToNearestYear()) + { + _years.Dequeue(); + } + } + + if (_years.Count == 0) + { + return; + } + + string firstYearPath = _years.Dequeue(); + + // Get Segments for first year + _segments = await GetSegmentsInYear( + async: async, + yearPath: firstYearPath, + startTime: _startTime, + endTime: MinDateTime(_lastConsumable, _endTime)) + .ConfigureAwait(false); + + _currentSegment = new Segment( + _containerClient, + _segments.Dequeue(), + _currentSegmentCursor); + _isInitalized = true; + } + + //TODO current round robin strategy doesn't work for live streaming! + // The last segment may still be adding chunks. 
+ public async Task> GetPage( + bool async, + int pageSize = 512) + { + if (!_isInitalized) + { + await Initalize(async).ConfigureAwait(false); + } + + if (!HasNext()) + { + throw new InvalidOperationException("Change feed doesn't have any more events"); + } + + //TODO what should we return here? Also do we really need to check this on every page? + if (_currentSegment.DateTime > _endTime) + { + return new BlobChangeFeedEventPage(); + } + + //TODO what should we return here? Also do we really need to check this on every page? + if (_currentSegment.DateTime > _lastConsumable) + { + return new BlobChangeFeedEventPage(); + } + + // Get next page + List blobChangeFeedEvents = new List(); + + int remainingEvents = pageSize; + while (blobChangeFeedEvents.Count < pageSize + && HasNext()) + { + //TODO what if segment doesn't have a page size worth of data? + List newEvents = await _currentSegment.GetPage(async, remainingEvents).ConfigureAwait(false); + blobChangeFeedEvents.AddRange(newEvents); + remainingEvents -= newEvents.Count; + await AdvanceSegmentIfNecessary(async).ConfigureAwait(false); + } + + return new BlobChangeFeedEventPage(blobChangeFeedEvents, JsonSerializer.Serialize(GetCursor())); + } + + + + public bool HasNext() + { + if (!_isInitalized) + { + return true; + } + + // We have no more segments, years, and the current segment doesn't have hext. + if (_segments.Count == 0 && _years.Count == 0 && !_currentSegment.HasNext()) + { + return false; + } + + DateTimeOffset end = MinDateTime(_lastConsumable, _endTime); + + return _currentSegment.DateTime <= end; + } + + //TODO how do update this? 
+ public DateTimeOffset LastConsumable() + { + return _lastConsumable; + } + + internal BlobChangeFeedCursor GetCursor() + => new BlobChangeFeedCursor( + urlHash: _containerClient.Uri.ToString().GetHashCode(), + endDateTime: _endTime, + currentSegmentCursor: _currentSegment.GetCursor()); + + internal async Task> GetSegmentsInYear( + bool async, + string yearPath, + DateTimeOffset? startTime = default, + DateTimeOffset? endTime = default) + { + List list = new List(); + + if (async) + { + await foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchyAsync( + prefix: yearPath) + .ConfigureAwait(false)) + { + if (blobHierarchyItem.IsPrefix) + continue; + + DateTimeOffset segmentDateTime = blobHierarchyItem.Blob.Name.ToDateTimeOffset().Value; + if (startTime.HasValue && segmentDateTime < startTime + || endTime.HasValue && segmentDateTime > endTime) + continue; + + list.Add(blobHierarchyItem.Blob.Name); + } + } + else + { + foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchy( + prefix: yearPath)) + { + if (blobHierarchyItem.IsPrefix) + continue; + + DateTimeOffset segmentDateTime = blobHierarchyItem.Blob.Name.ToDateTimeOffset().Value; + if (startTime.HasValue && segmentDateTime < startTime + || endTime.HasValue && segmentDateTime > endTime) + continue; + + list.Add(blobHierarchyItem.Blob.Name); + } + } + + return new Queue(list); + } + + private async Task AdvanceSegmentIfNecessary(bool async) + { + // If the current segment is completed, remove it + if (!_currentSegment.HasNext() && _segments.Count > 0) + { + _currentSegment = new Segment(_containerClient, _segments.Dequeue()); + } + + // If _segments is empty, refill it + // TODO pull this out into private method + else if (_segments.Count == 0 && _years.Count > 0) + { + string yearPath = _years.Dequeue(); + + // Get Segments for first year + _segments = await GetSegmentsInYear( + async: async, + yearPath: yearPath, + startTime: _startTime, + endTime: 
_endTime) + .ConfigureAwait(false); + + if (_segments.Count > 0) + { + _currentSegment = new Segment(_containerClient, _segments.Dequeue()); + } + } + } + + internal async Task> GetYearPaths(bool async) + { + List list = new List(); + + if (async) + { + await foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchyAsync( + prefix: Constants.ChangeFeed.SegmentPrefix, + delimiter: "/").ConfigureAwait(false)) + { + if (blobHierarchyItem.Prefix.Contains(Constants.ChangeFeed.InitalizationSegment)) + continue; + + list.Add(blobHierarchyItem.Prefix); + } + } + else + { + foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchy( + prefix: Constants.ChangeFeed.SegmentPrefix, + delimiter: "/")) + { + if (blobHierarchyItem.Prefix.Contains(Constants.ChangeFeed.InitalizationSegment)) + continue; + + list.Add(blobHierarchyItem.Prefix); + } + } + return new Queue(list); + } + + private static DateTimeOffset MinDateTime(DateTimeOffset lastConsumable, DateTimeOffset? endDate) + { + if (endDate.HasValue && endDate.Value < lastConsumable) + { + return endDate.Value; + } + + return lastConsumable; + } + + private static void ValidateCursor( + BlobContainerClient containerClient, + BlobChangeFeedCursor cursor) + { + if (containerClient.Uri.ToString().GetHashCode() != cursor.UrlHash) + { + throw new ArgumentException("Cursor URL does not match container URL"); + } + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs new file mode 100644 index 0000000000000..6a4bde1822b97 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading.Tasks; +using Azure.Storage.Blobs.ChangeFeed.Models; +using Azure.Storage.Internal.Avro; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + /// + /// Chunk. + /// + internal class Chunk : IDisposable + { + /// + /// Blob Client for downloading the Chunk. + /// + private readonly BlobClient _blobClient; + + /// + /// Avro Reader to parser the Events. + /// + private AvroReader _avroReader; + + /// + /// The byte offset of the beginning of the current + /// Block. + /// + public long BlockOffset { get; private set; } + + /// + /// The index of the Event within the current block. + /// + public long EventIndex { get; private set; } + + + /// + /// Data stream. + /// + private LazyLoadingBlobStream _dataStream; + + /// + /// Avro head stream. + /// + private Stream _headStream; + + public Chunk( + BlobContainerClient containerClient, + string chunkPath, + long? blockOffset = default, + long? eventIndex = default) + { + _blobClient = containerClient.GetBlobClient(chunkPath); + BlockOffset = blockOffset ?? 0; + EventIndex = eventIndex ?? 0; + + _dataStream = new LazyLoadingBlobStream( + _blobClient, + offset: BlockOffset, + blockSize: Constants.ChangeFeed.ChunkBlockDownloadSize); + + // We aren't starting from the beginning of the Chunk + if (BlockOffset != 0) + { + _headStream = new LazyLoadingBlobStream( + _blobClient, + offset: 0, + blockSize: 3 * Constants.KB); + + _avroReader = new AvroReader( + _dataStream, + _headStream, + BlockOffset, + EventIndex); + } + else + { + _avroReader = new AvroReader(_dataStream); + } + } + + //TODO what if the Segment isn't Finalized?? 
+ public bool HasNext() + => _avroReader.HasNext(); + + public async Task Next(bool async) + { + Dictionary result; + + if (!HasNext()) + { + return null; + } + + result = (Dictionary)await _avroReader.Next(async).ConfigureAwait(false); + BlockOffset = _avroReader.BlockOffset; + EventIndex = _avroReader.ObjectIndex; + return new BlobChangeFeedEvent(result); + } + + public void Dispose() + { + _dataStream.Dispose(); + _headStream.Dispose(); + GC.SuppressFinalize(this); + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs new file mode 100644 index 0000000000000..84ad361165d6f --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs @@ -0,0 +1,242 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core.Pipeline; +using Azure.Storage.Blobs.Models; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + internal class LazyLoadingBlobStream : Stream + { + /// + /// BlobClient to make download calls with. + /// + private readonly BlobClient _blobClient; + + /// + /// The offset within the blob of the next block we will download. + /// + private long _offset; + + /// + /// The number of bytes we'll download with each download call. + /// + private readonly long _blockSize; + + /// + /// Underlying Stream. + /// + private Stream _stream; + + /// + /// If this LazyLoadingBlobStream has been initalized. + /// + private bool _initalized; + + /// + /// The number of bytes in the last download call. + /// + private long _lastDownloadBytes; + + /// + /// The current length of the blob. 
+ /// + private long _blobLength; + + public LazyLoadingBlobStream(BlobClient blobClient, long offset, long blockSize) + { + _blobClient = blobClient; + _offset = offset; + _blockSize = blockSize; + _initalized = false; + } + + /// + public override int Read( + byte[] buffer, + int offset, + int count) + => ReadInternal( + async: false, + buffer, + offset, + count).EnsureCompleted(); + + /// + public override async Task ReadAsync( + byte[] buffer, + int offset, + int count, + CancellationToken cancellationToken) + => await ReadInternal( + async: true, + buffer, + offset, + count, + cancellationToken).ConfigureAwait(false); + + /// + /// Initalizes this LazyLoadingBlobStream. + /// The number of bytes that were downloaded in the first download request. + /// + private async Task Initalize(bool async, CancellationToken cancellationToken) + { + await DownloadBlock(async, cancellationToken).ConfigureAwait(false); + _initalized = true; + } + + /// + /// Downloads the next block. + /// Number of bytes that were downloaded + /// + private async Task DownloadBlock(bool async, CancellationToken cancellationToken) + { + Response response; + HttpRange range = new HttpRange(_offset, _blockSize); + + response = async + ? await _blobClient.DownloadAsync(range, cancellationToken: cancellationToken).ConfigureAwait(false) + : _blobClient.Download(range); + _stream = response.Value.Content; + _offset += response.Value.ContentLength; + _lastDownloadBytes = response.Value.ContentLength; + _blobLength = GetBlobLength(response); + } + + /// + /// Shared sync and async Read implementation. 
+ /// + private async Task ReadInternal( + bool async, + byte[] buffer, + int offset, + int count, + CancellationToken cancellationToken = default) + { + ValidateReadParameters(buffer, offset, count); + + if (!_initalized) + { + await Initalize(async, cancellationToken: cancellationToken).ConfigureAwait(false); + if (_lastDownloadBytes == 0) + { + return 0; + } + } + + int totalCopiedBytes = 0; + do + { + int copiedBytes = async + ? await _stream.ReadAsync(buffer, offset, count).ConfigureAwait(false) + : _stream.Read(buffer, offset, count); + offset += copiedBytes; + count -= copiedBytes; + totalCopiedBytes += copiedBytes; + + // We've run out of bytes in the current block. + if (copiedBytes == 0) + { + // We hit the end of the blob with the last download call. + //TODO what if the blob is growing? + if (_offset == _blobLength) + { + return totalCopiedBytes; + } + + // Download the next block + else + { + await DownloadBlock(async, cancellationToken).ConfigureAwait(false); + } + } + } + while (count > 0); + return totalCopiedBytes; + } + + private static void ValidateReadParameters(byte[] buffer, int offset, int count) + { + if (buffer == null) + { + throw new ArgumentNullException($"{nameof(buffer)}", $"{nameof(buffer)} cannot be null."); + } + + if (offset < 0) + { + throw new ArgumentOutOfRangeException($"{nameof(offset)} cannot be less than 0."); + } + + if (offset > buffer.Length) + { + throw new ArgumentOutOfRangeException($"{nameof(offset)} cannot exceed {nameof(buffer)} length."); + } + + if (count < 0) + { + throw new ArgumentOutOfRangeException($"{nameof(count)} cannot be less than 0."); + } + + if (offset + count > buffer.Length) + { + throw new ArgumentOutOfRangeException($"{nameof(offset)} + {nameof(count)} cannot exceed {nameof(buffer)} length."); + } + } + + private static long GetBlobLength(Response response) + { + string lengthString = response.Value.Details.ContentRange; + string[] split = lengthString.Split('/'); + return 
Convert.ToInt64(split[1], CultureInfo.InvariantCulture); + } + + /// + public override bool CanRead => true; + + /// + public override bool CanSeek => false; + + /// + public override bool CanWrite => throw new NotSupportedException(); + + public override long Length => throw new NotSupportedException(); + + /// + public override long Position { + get => _stream.Position; + set => throw new NotSupportedException(); + } + + /// + public override void Flush() + { + } + + /// + public override long Seek(long offset, SeekOrigin origin) + { + throw new NotSupportedException(); + } + + /// + public override void SetLength(long value) + { + throw new NotSupportedException(); + } + + /// + public override void Write(byte[] buffer, int offset, int count) + { + throw new NotSupportedException(); + } + + protected override void Dispose(bool disposing) => _stream.Dispose(); + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedCursor.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedCursor.cs new file mode 100644 index 0000000000000..75e627e5292b5 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedCursor.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; + +namespace Azure.Storage.Blobs.ChangeFeed.Models +{ + /// + /// BlobChangeFeedCursor. + /// + internal class BlobChangeFeedCursor + { + /// + /// CursorVersion. + /// + public int CursorVersion { get; set; } + + /// + /// UrlHash. + /// + public long UrlHash { get; set; } + + /// + /// EndDateTime. + /// + public DateTimeOffset? EndTime { get; set; } + + /// + /// The Segment Cursor for the current segment. + /// + public BlobChangeFeedSegmentCursor CurrentSegmentCursor { get; set; } + + internal BlobChangeFeedCursor( + long urlHash, + DateTimeOffset? 
endDateTime, + BlobChangeFeedSegmentCursor currentSegmentCursor) + { + CursorVersion = 1; + UrlHash = urlHash; + EndTime = endDateTime; + CurrentSegmentCursor = currentSegmentCursor; + } + + public BlobChangeFeedCursor() { } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs new file mode 100644 index 0000000000000..ddf5eee4c4fe0 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Text; + +namespace Azure.Storage.Blobs.ChangeFeed.Models +{ + /// + /// BlobChangeFeedEvent. + /// + public class BlobChangeFeedEvent + { + /// + /// Internal constructor. + /// + internal BlobChangeFeedEvent(Dictionary record) + { + Topic = (string)record["topic"]; + Subject = (string)record["subject"]; + EventType = ToBlobChangeFeedEventType((string)record["eventType"]); + EventTime = DateTimeOffset.Parse((string)record["eventTime"], CultureInfo.InvariantCulture); + Id = Guid.Parse((string)record["id"]); + EventData = new BlobChangeFeedEventData((Dictionary)record["data"]); + record.TryGetValue("dataVersion", out object dataVersion); + DataVersion = (long?)dataVersion; + record.TryGetValue("metadataVersion", out object metadataVersion); + MetadataVersion = (string)metadataVersion; + } + + internal BlobChangeFeedEvent() { } + + /// + /// Full resource path to the event source. This field is not writeable. Event Grid provides this value. + /// + public string Topic { get; internal set; } + + /// + /// Publisher-defined path to the event subject. 
+ /// + public string Subject { get; internal set; } + + /// + /// One of the registered event types for this event source. + /// + public BlobChangeFeedEventType EventType { get; internal set; } + + /// + /// The time the event is generated based on the provider's UTC time. + /// + public DateTimeOffset EventTime { get; internal set; } + + /// + /// Unique identifier for the event. + /// + public Guid Id { get; internal set; } + + /// + /// Blob storage event data. + /// + public BlobChangeFeedEventData EventData { get; internal set; } + + /// + /// The schema version of the data object. The publisher defines the schema version. + /// + public long? DataVersion { get; internal set; } + + /// + /// The schema version of the event metadata. Event Grid defines the schema of the top-level properties. + /// Event Grid provides this value. + /// + public string MetadataVersion { get; internal set; } + + /// + public override string ToString() => $"{EventTime}: {EventType} {Subject} ({EventData?.ToString() ?? "Unknown Event"})"; + + private static BlobChangeFeedEventType ToBlobChangeFeedEventType(string s) + { + switch (s) + { + case "BlobCreated": + return BlobChangeFeedEventType.BlobCreated; + case "BlobDeleted": + return BlobChangeFeedEventType.BlobDeleted; + default: + return default; + } + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs new file mode 100644 index 0000000000000..5786b5d63c336 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs @@ -0,0 +1,129 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using Azure.Storage.Blobs.Models; + +namespace Azure.Storage.Blobs.ChangeFeed.Models +{ + /// + /// BlobChangeFeedEventData. 
+ /// + public class BlobChangeFeedEventData + { + /// + /// Internal constructor. + /// + internal BlobChangeFeedEventData() { } + + internal BlobChangeFeedEventData(Dictionary record) + { + Api = ((string)record["api"]); + ClientRequestId = Guid.Parse((string)record["clientRequestId"]); + RequestId = Guid.Parse((string)record["requestId"]); + ETag = new ETag((string)record["etag"]); + ContentType = (string)record["contentType"]; + ContentLength = (long)record["contentLength"]; + BlobType = ((string)record["blobType"]) switch + { + "BlockBlob" => BlobType.Block, + "PageBlob" => BlobType.Page, + "AppendBlob" => BlobType.Append, + _ => default + }; + record.TryGetValue("contentOffset", out object contentOffset); + ContentOffset = (long?)contentOffset; + record.TryGetValue("destinationUrl", out object destinationUrl); + DestinationUri = !string.IsNullOrEmpty((string)destinationUrl) ? new Uri((string)destinationUrl) : null; + record.TryGetValue("sourceUrl", out object sourceUrl); + SourceUri = !string.IsNullOrEmpty((string)sourceUrl) ? new Uri((string)sourceUrl) : null; + record.TryGetValue("url", out object url); + Uri = !string.IsNullOrEmpty((string)url) ? new Uri((string)url) : null; + record.TryGetValue("recursive", out object recursive); + Recursive = (bool?)recursive; + Sequencer = (string)record["sequencer"]; + } + + /// + /// The operation that triggered the event. + /// + public string Api { get; internal set; } + + /// + /// A client-provided request id for the storage API operation. This id can be used to correlate to Azure Storage + /// diagnostic logs using the "client-request-id" field in the logs, and can be provided in client requests using + /// the "x-ms-client-request-id" header. + /// + public Guid ClientRequestId { get; internal set; } + + /// + /// Service-generated request id for the storage API operation. 
Can be used to correlate to Azure Storage diagnostic + /// logs using the "request-id-header" field in the logs and is returned from initiating API call in the + /// 'x-ms-request-id' header. + /// + public Guid RequestId { get; internal set; } + + /// + /// The value that you can use to perform operations conditionally. + /// + public ETag ETag { get; internal set; } + + /// + /// The content type specified for the blob. + /// + public string ContentType { get; internal set; } + + /// + /// The size of the blob in bytes. + /// + public long ContentLength { get; internal set; } + + /// + /// The type of blob. Valid values are either BlockBlob or PageBlob. + /// + public BlobType BlobType { get; internal set; } + + /// + /// The offset in bytes of a write operation taken at the point where the event-triggering application completed + /// writing to the file. + /// Appears only for events triggered on blob storage accounts that have a hierarchical namespace. + /// + public long? ContentOffset { get; internal set; } + + /// + /// The url of the file that will exist after the operation completes. For example, if a file is renamed, + /// the destinationUrl property contains the url of the new file name. + /// Appears only for events triggered on blob storage accounts that have a hierarchical namespace. + /// + public Uri DestinationUri { get; internal set; } + + /// + /// The url of the file that exists prior to the operation. For example, if a file is renamed, the sourceUrl + /// contains the url of the original file name prior to the rename operation. + /// Appears only for events triggered on blob storage accounts that have a hierarchical namespace. + /// + public Uri SourceUri { get; internal set; } + + /// + /// The path to the blob. 
+ /// If the client uses a Blob REST API, then the url has this structure: + /// (storage-account-name).blob.core.windows.net/(container-name)/(file-name) + /// If the client uses a Data Lake Storage REST API, then the url has this structure: + /// (storage-account-name).dfs.core.windows.net/(file-system-name)/(file-name). + /// + public Uri Uri { get; internal set; } + + /// + /// True to perform the operation on all child directories; otherwise False. + /// Appears only for events triggered on blob storage accounts that have a hierarchical namespace. + /// + public bool? Recursive { get; internal set; } + + /// + /// An opaque string value representing the logical sequence of events for any particular blob name. + /// Users can use standard string comparison to understand the relative sequence of two events on the same blob name. + /// + public string Sequencer { get; internal set; } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs new file mode 100644 index 0000000000000..93a259896ef86 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +using System; +using System.Collections.Generic; +using System.Text; + +namespace Azure.Storage.Blobs.ChangeFeed.Models +{ + internal class BlobChangeFeedEventPage : Page + { + public override IReadOnlyList Values { get; } + public override string ContinuationToken { get; } + public override Response GetRawResponse() => null; + //private Response _raw; + + public BlobChangeFeedEventPage() { } + + public BlobChangeFeedEventPage(List events, string continuationToken) + { + Values = events; + ContinuationToken = continuationToken; + } + + // public BlobChangeFeedEventPage(Response raw, List data) + // { + // _raw = raw; + // ContinuationToken = null; + // var changes = new List(); + // foreach (GenericRecord value in data) + // { + // changes.Add(new BlobChangeFeedEvent(value)); + // } + // Values = changes; + // } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventType.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventType.cs new file mode 100644 index 0000000000000..c14fdef71666c --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventType.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +namespace Azure.Storage.Blobs.ChangeFeed.Models +{ + /// + /// BlobChangeFeedEventType. + /// + public enum BlobChangeFeedEventType + { + /// + /// Blob created. + /// + BlobCreated = 0, + + /// + /// Blob deleted. + /// + BlobDeleted = 1, + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs new file mode 100644 index 0000000000000..5ee16ae607562 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; +using Azure.Storage.Blobs.ChangeFeed.Models; +using Azure.Storage.Blobs.Models; + +namespace Azure.Storage.ChangeFeed.Models +{ + /// + /// BlobChangeFeedModelFactory for building mock objects. + /// + public static class BlobChangeFeedModelFactory + { + /// + /// Creates a new BlobChangeFeedEvent instance for mocking. + /// + public static BlobChangeFeedEvent BlobChangeFeedEvent( + string topic, + string subject, + BlobChangeFeedEventType eventType, + DateTimeOffset eventTime, + Guid id, + BlobChangeFeedEventData eventData, + long dataVersion, + string metadataVersion) + => new BlobChangeFeedEvent + { + Topic = topic, + Subject = subject, + EventType = eventType, + EventTime = eventTime, + Id = id, + EventData = eventData, + DataVersion = dataVersion, + MetadataVersion = metadataVersion + }; + + /// + /// Creates a new BlobChangeFeedEventData instance for mocking. 
+ /// + public static BlobChangeFeedEventData BlobChangeFeedEventData( + string api, + Guid clientRequestId, + Guid requestId, + ETag eTag, + string contentType, + long contentLength, + BlobType blobType, + long contentOffset, + Uri destinationUri, + Uri sourceUri, + Uri uri, + bool recursive, + string sequencer) + => new BlobChangeFeedEventData + { + Api = api, + ClientRequestId = clientRequestId, + RequestId = requestId, + ETag = eTag, + ContentType = contentType, + ContentLength = contentLength, + BlobType = blobType, + ContentOffset = contentOffset, + DestinationUri = destinationUri, + SourceUri = sourceUri, + Uri = uri, + Recursive = recursive, + Sequencer = sequencer + }; + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedSegmentCursor.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedSegmentCursor.cs new file mode 100644 index 0000000000000..1df14b52a189b --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedSegmentCursor.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; + +namespace Azure.Storage.Blobs.ChangeFeed.Models +{ + /// + /// Segment Cursor. + /// + internal class BlobChangeFeedSegmentCursor + { + /// + /// Shard Cursors. + /// + public List ShardCursors { get; set; } + + /// + /// Index of the current Shard. + /// + public int ShardIndex { get; set; } + + /// + /// The DateTimeOffset of the Segment. 
+ /// + public DateTimeOffset SegmentTime { get; set; } + + internal BlobChangeFeedSegmentCursor( + DateTimeOffset segmentDateTime, + List shardCursors, + int shardIndex) + { + SegmentTime = segmentDateTime; + ShardCursors = shardCursors; + ShardIndex = shardIndex; + } + + public BlobChangeFeedSegmentCursor() { } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedShardCursor.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedShardCursor.cs new file mode 100644 index 0000000000000..f5524cd9525cf --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedShardCursor.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; + +namespace Azure.Storage.Blobs.ChangeFeed.Models +{ + internal class BlobChangeFeedShardCursor + { + /// + /// Index of the current Chunk. + /// + public long ChunkIndex { get; set; } + + /// + /// The byte offset of the beginning of + /// the current Avro block. + /// + public long BlockOffset { get; set; } + + /// + /// The index of the current event within + /// the current Avro block. + /// + public long EventIndex { get; set; } + + internal BlobChangeFeedShardCursor( + long chunkIndex, + long blockOffset, + long eventIndex) + { + ChunkIndex = chunkIndex; + BlockOffset = blockOffset; + EventIndex = eventIndex; + } + + /// + /// + /// + public BlobChangeFeedShardCursor() { } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs new file mode 100644 index 0000000000000..35dca8f1b7c80 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs @@ -0,0 +1,178 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +using System; +using System.Collections.Generic; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Core.Pipeline; +using Azure.Storage.Blobs; +using Azure.Storage.Blobs.ChangeFeed.Models; +using Azure.Storage.Blobs.Models; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + internal class Segment + { + /// + /// If this Segment is finalized. + /// + public bool Finalized { get; private set; } + + /// + /// The time (to the nearest hour) associated with this Segment. + /// + public DateTimeOffset DateTime { get; private set; } + + /// + /// Container client for listing Shards. + /// + private readonly BlobContainerClient _containerClient; + + /// + /// The path to the manifest for this Segment. + /// + private readonly string _manifestPath; + + /// + /// The Shards associated with this Segment. + /// + private readonly List _shards; + + /// + /// The index of the Shard we will return the next event from. + /// + private int _shardIndex; + + /// + /// If this Segement has been initalized. + /// + private bool _isInitalized; + + private BlobChangeFeedSegmentCursor _cursor; + + public Segment( + BlobContainerClient containerClient, + string manifestPath, + BlobChangeFeedSegmentCursor cursor = default) + { + _containerClient = containerClient; + _manifestPath = manifestPath; + DateTime = manifestPath.ToDateTimeOffset().Value; + _shards = new List(); + _cursor = cursor; + _shardIndex = cursor?.ShardIndex ?? 
0; + } + + private async Task Initalize(bool async) + { + // Download segment manifest + BlobClient blobClient = _containerClient.GetBlobClient(_manifestPath); + BlobDownloadInfo blobDownloadInfo; + + if (async) + { + blobDownloadInfo = await blobClient.DownloadAsync().ConfigureAwait(false); + } + else + { + blobDownloadInfo = blobClient.Download(); + } + + // Parse segment manifest + JsonDocument jsonManifest; + + if (async) + { + jsonManifest = await JsonDocument.ParseAsync(blobDownloadInfo.Content).ConfigureAwait(false); + } + else + { + jsonManifest = JsonDocument.Parse(blobDownloadInfo.Content); + } + + // Initalized Finalized field + string statusString = jsonManifest.RootElement.GetProperty("status").GetString(); + Finalized = statusString == "Finalized"; + + int i = 0; + foreach (JsonElement shardJsonElement in jsonManifest.RootElement.GetProperty("chunkFilePaths").EnumerateArray()) + { + //TODO cleanup this line + string shardPath = shardJsonElement.ToString().Substring("$blobchangefeed/".Length); + Shard shard = new Shard(_containerClient, shardPath, _cursor?.ShardCursors?[i]); + _shards.Add(shard); + i++; + } + _isInitalized = true; + } + + public BlobChangeFeedSegmentCursor GetCursor() + { + List shardCursors = new List(); + foreach (Shard shard in _shards) + { + shardCursors.Add(shard.GetCursor()); + } + return new BlobChangeFeedSegmentCursor( + segmentDateTime: DateTime, + shardCursors: shardCursors, + shardIndex: _shardIndex); + } + + public async Task> GetPage( + bool async, + int? 
pageSize) + { + List changeFeedEventList = new List(); + + if (!_isInitalized) + { + await Initalize(async).ConfigureAwait(false); + } + + if (!HasNext()) + { + throw new InvalidOperationException("Segment doesn't have any more events"); + } + + int i = 0; + while (i < pageSize && _shards.Count > 0) + { + Shard currentShard = _shards[_shardIndex]; + + BlobChangeFeedEvent changeFeedEvent = await currentShard.Next(async).ConfigureAwait(false); + + changeFeedEventList.Add(changeFeedEvent); + + // If the current shard is completed, remove it from _shards + if (!currentShard.HasNext()) + { + _shards.RemoveAt(_shardIndex); + } + + i++; + _shardIndex++; + if (_shardIndex >= _shards.Count) + { + _shardIndex = 0; + } + } + + //TODO how to get raw response for page? Does it matter? + return changeFeedEventList; + } + + //TODO figure out if this is right. + public bool HasNext() + { + if (!_isInitalized) + { + return true; + } + + return _shards.Count > 0; + } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs new file mode 100644 index 0000000000000..41d583d6f52e7 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs @@ -0,0 +1,160 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Azure.Storage.Blobs.Models; +using Azure.Storage.Blobs.ChangeFeed.Models; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + internal class Shard : IDisposable + { + /// + /// Container Client for listing Chunks. + /// + private readonly BlobContainerClient _containerClient; + + /// + /// The path to this Shard. + /// + private readonly string _shardPath; + + /// + /// Queue of the paths to Chunks we haven't processed. + /// + private readonly Queue _chunks; + + /// + /// The Chunk we are currently processing. 
+ /// + private Chunk _currentChunk; + + /// + /// The index of the Chunk we are processing. + /// + private long _chunkIndex; + + /// + /// The byte offset of the beginning of the + /// current Avro block. + /// + private long _blockOffset; + + /// + /// Index of the current event within the + /// Avro block. + /// + private long _eventIndex; + + /// + /// If this Shard has been initalized. + /// + private bool _isInitialized; + + public Shard( + BlobContainerClient containerClient, + string shardPath, + BlobChangeFeedShardCursor shardCursor = default) + { + _containerClient = containerClient; + _shardPath = shardPath; + _chunks = new Queue(); + _isInitialized = false; + _chunkIndex = shardCursor?.ChunkIndex ?? 0; + _blockOffset = shardCursor?.BlockOffset ?? 0; + _eventIndex = shardCursor?.EventIndex ?? 0; + } + + private async Task Initalize(bool async) + { + // Get Chunks + if (async) + { + await foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchyAsync( + prefix: _shardPath).ConfigureAwait(false)) + { + if (blobHierarchyItem.IsPrefix) + continue; + + //Chunk chunk = new Chunk(_containerClient, blobHierarchyItem.Blob.Name); + _chunks.Enqueue(blobHierarchyItem.Blob.Name); + } + } + else + { + foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchy( + prefix: _shardPath)) + { + if (blobHierarchyItem.IsPrefix) + continue; + + //Chunk chunk = new Chunk(_containerClient, blobHierarchyItem.Blob.Name); + _chunks.Enqueue(blobHierarchyItem.Blob.Name); + } + } + + // Fast forward to current Chunk + if (_chunkIndex > 0) + { + //TODO possible off by 1 error here. 
+ for (int i = 0; i < _chunkIndex; i++) + { + _chunks.Dequeue(); + } + } + + _currentChunk = new Chunk( + _containerClient, + _chunks.Dequeue(), + _blockOffset, + _eventIndex); + _isInitialized = true; + } + + public BlobChangeFeedShardCursor GetCursor() + => new BlobChangeFeedShardCursor( + _chunkIndex, + _currentChunk.BlockOffset, + _currentChunk.EventIndex); + + public bool HasNext() + { + if (!_isInitialized) + { + return true; + } + + return _chunks.Count > 0 || _currentChunk.HasNext(); + } + + public async Task Next(bool async) + { + if (!_isInitialized) + { + await Initalize(async).ConfigureAwait(false); + } + + if (!HasNext()) + { + throw new InvalidOperationException("Shard doesn't have any more events"); + } + + BlobChangeFeedEvent changeFeedEvent; + + changeFeedEvent = await _currentChunk.Next(async).ConfigureAwait(false); + + // Remove currentChunk if it doesn't have another event. + if (!_currentChunk.HasNext() && _chunks.Count > 0) + { + _currentChunk = new Chunk(_containerClient, _chunks.Dequeue()); + _chunkIndex++; + } + return changeFeedEvent; + } + + /// + public void Dispose() => _currentChunk.Dispose(); + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Azure.Storage.Blobs.ChangeFeed.Tests.csproj b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Azure.Storage.Blobs.ChangeFeed.Tests.csproj new file mode 100644 index 0000000000000..c4a3559bf805b --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Azure.Storage.Blobs.ChangeFeed.Tests.csproj @@ -0,0 +1,20 @@ + + + $(RequiredTargetFrameworks) + + + Microsoft Azure.Storage.Blobs.ChangeFeed client library tests + false + + + + + + + + + + PreserveNewest + + + \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs new file mode 100644 index 0000000000000..9955c4fcf772c --- /dev/null +++ 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; +using Azure.Storage.Blobs; +using Azure.Storage.Blobs.ChangeFeed.Models; +using NUnit.Framework; + +namespace Azure.Storage.Blobs.ChangeFeed.Tests +{ + public class BlobChangeFeedAsyncPagableTests : ChangeFeedTestBase + { + public BlobChangeFeedAsyncPagableTests(bool async) + : base(async, null /* RecordedTestMode.Record /* to re-record */) + { + } + + //TODO better cursor tests + //TODO start and end time tests + //TODO page size tests + + [Test] + [Ignore("")] + public async Task Test() + { + BlobServiceClient service = GetServiceClient_SharedKey(); + BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); + BlobChangeFeedAsyncPagable blobChangeFeedAsyncPagable + = blobChangeFeedClient.GetChangesAsync(); + IList list = await blobChangeFeedAsyncPagable.ToListAsync(); + foreach (BlobChangeFeedEvent e in list) + { + Console.WriteLine(e); + } + } + + [Test] + [Ignore("")] + public async Task PageSizeTest() + { + int pageSize = 100; + BlobServiceClient service = GetServiceClient_SharedKey(); + BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); + IAsyncEnumerator> asyncEnumerator + = blobChangeFeedClient.GetChangesAsync().AsPages(pageSizeHint: pageSize).GetAsyncEnumerator(); + List pageSizes = new List(); + while (await asyncEnumerator.MoveNextAsync()) + { + pageSizes.Add(asyncEnumerator.Current.Values.Count); + } + + // All pages except the last should have a count == pageSize. 
+ for (int i = 0; i < pageSizes.Count - 1; i++) + { + Assert.AreEqual(pageSize, pageSizes[i]); + } + } + + [Test] + [Ignore("")] + public async Task CursorTest() + { + BlobServiceClient service = GetServiceClient_SharedKey(); + BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); + BlobChangeFeedAsyncPagable blobChangeFeedAsyncPagable + = blobChangeFeedClient.GetChangesAsync(); + IAsyncEnumerable> asyncEnumerable = blobChangeFeedAsyncPagable.AsPages(pageSizeHint: 500); + Page page = await asyncEnumerable.FirstAsync(); + foreach (BlobChangeFeedEvent changeFeedEvent in page.Values) + { + Console.WriteLine(changeFeedEvent); + } + + Console.WriteLine("break"); + + string continuation = page.ContinuationToken; + + BlobChangeFeedAsyncPagable cursorBlobChangeFeedAsyncPagable + = blobChangeFeedClient.GetChangesAsync(continuation); + + IList list = await cursorBlobChangeFeedAsyncPagable.ToListAsync(); + foreach (BlobChangeFeedEvent e in list) + { + Console.WriteLine(e); + } + + } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs new file mode 100644 index 0000000000000..22660f9829562 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs @@ -0,0 +1,99 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +using System; +using System.Collections.Generic; +using System.Text; +using NUnit.Framework; + +namespace Azure.Storage.Blobs.ChangeFeed.Tests +{ + public class BlobChangeFeedExtensionsTests + { + [Test] + public void ToDateTimeOffsetTests() + { + Assert.AreEqual( + new DateTimeOffset(2019, 11, 2, 17, 0, 0, TimeSpan.Zero), + "idx/segments/2019/11/02/1700/meta.json".ToDateTimeOffset()); + + Assert.AreEqual( + new DateTimeOffset(2019, 11, 2, 17, 0, 0, TimeSpan.Zero), + "idx/segments/2019/11/02/1700/".ToDateTimeOffset()); + + Assert.AreEqual( + new DateTimeOffset(2019, 11, 2, 17, 0, 0, TimeSpan.Zero), + "idx/segments/2019/11/02/1700".ToDateTimeOffset()); + + Assert.AreEqual( + new DateTimeOffset(2019, 11, 2, 0, 0, 0, TimeSpan.Zero), + "idx/segments/2019/11/02/".ToDateTimeOffset()); + + Assert.AreEqual( + new DateTimeOffset(2019, 11, 2, 0, 0, 0, TimeSpan.Zero), + "idx/segments/2019/11/02".ToDateTimeOffset()); + + Assert.AreEqual( + new DateTimeOffset(2019, 11, 1, 0, 0, 0, TimeSpan.Zero), + "idx/segments/2019/11/".ToDateTimeOffset()); + + Assert.AreEqual( + new DateTimeOffset(2019, 11, 1, 0, 0, 0, TimeSpan.Zero), + "idx/segments/2019/11".ToDateTimeOffset()); + + Assert.AreEqual( + new DateTimeOffset(2019, 1, 1, 0, 0, 0, TimeSpan.Zero), + "idx/segments/2019/".ToDateTimeOffset()); + + Assert.AreEqual( + new DateTimeOffset(2019, 1, 1, 0, 0, 0, TimeSpan.Zero), + "idx/segments/2019".ToDateTimeOffset()); + + Assert.AreEqual( + null, + ((string)null).ToDateTimeOffset()); + } + + [Test] + public void RoundDownToNearestHourTests() + { + Assert.AreEqual( + new DateTimeOffset?( + new DateTimeOffset(2020, 03, 17, 20, 0, 0, TimeSpan.Zero)), + (new DateTimeOffset?( + new DateTimeOffset(2020, 03, 17, 20, 25, 30, TimeSpan.Zero))).RoundDownToNearestHour()); + + Assert.AreEqual( + null, + ((DateTimeOffset?)null).RoundDownToNearestHour()); + } + + [Test] + public void RoundUpToNearestHourTests() + { + Assert.AreEqual( + new DateTimeOffset?( + new DateTimeOffset(2020, 03, 17, 21, 0, 0, 
TimeSpan.Zero)), + (new DateTimeOffset?( + new DateTimeOffset(2020, 03, 17, 20, 25, 30, TimeSpan.Zero))).RoundUpToNearestHour()); + + Assert.AreEqual( + null, + ((DateTimeOffset?)null).RoundUpToNearestHour()); + } + + [Test] + public void RoundDownToNearestYearTests() + { + Assert.AreEqual( + new DateTimeOffset?( + new DateTimeOffset(2020, 1, 1, 0, 0, 0, TimeSpan.Zero)), + (new DateTimeOffset?( + new DateTimeOffset(2020, 03, 17, 20, 25, 30, TimeSpan.Zero))).RoundDownToNearestYear()); + + Assert.AreEqual( + null, + ((DateTimeOffset?)null).RoundDownToNearestYear()); + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs new file mode 100644 index 0000000000000..af4ef366f228a --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using Azure.Core.Testing; +using Azure.Storage.Blobs.ChangeFeed.Models; +using NUnit.Framework; + +namespace Azure.Storage.Blobs.ChangeFeed.Tests +{ + public class BlobChangeFeedPagableTests : ChangeFeedTestBase + { + public BlobChangeFeedPagableTests(bool async) + : base(async, RecordedTestMode.Live /* RecordedTestMode.Record /* to re-record */) + { + } + + [Test] + public void Test() + { + BlobServiceClient service = GetServiceClient_SharedKey(); + BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); + BlobChangeFeedPagable blobChangeFeedPagable + = blobChangeFeedClient.GetChanges(); + IList list = blobChangeFeedPagable.ToList(); + foreach (BlobChangeFeedEvent e in list) + { + Console.WriteLine(e); + } + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs new file mode 100644 index 0000000000000..c5e76693797f0 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs @@ -0,0 +1,106 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Testing; +using Azure.Storage.Blobs; +using Azure.Storage.Blobs.Models; +using Azure.Storage.Test.Shared; + +namespace Azure.Storage.Blobs.ChangeFeed.Tests +{ + public class ChangeFeedTestBase : StorageTestBase + { + + public ChangeFeedTestBase(bool async) : this(async, null) { } + + public ChangeFeedTestBase(bool async, RecordedTestMode? 
mode = null) + : base(async, RecordedTestMode.Live) + { + } + + public string GetNewContainerName() => $"test-container-{Recording.Random.NewGuid()}"; + public string GetNewBlobName() => $"test-blob-{Recording.Random.NewGuid()}"; + + public BlobServiceClient GetServiceClient_SharedKey() + => InstrumentClient( + new BlobServiceClient( + new Uri(TestConfigDefault.BlobServiceEndpoint), + new StorageSharedKeyCredential( + TestConfigDefault.AccountName, + TestConfigDefault.AccountKey), + GetOptions())); + + public BlobClientOptions GetOptions() + { + var options = new BlobClientOptions + { + Diagnostics = { IsLoggingEnabled = true }, + Retry = + { + Mode = RetryMode.Exponential, + MaxRetries = Constants.MaxReliabilityRetries, + Delay = TimeSpan.FromSeconds(Mode == RecordedTestMode.Playback ? 0.01 : 0.5), + MaxDelay = TimeSpan.FromSeconds(Mode == RecordedTestMode.Playback ? 0.1 : 10) + }, + Transport = GetTransport() + }; + if (Mode != RecordedTestMode.Live) + { + options.AddPolicy(new RecordedClientRequestIdPolicy(Recording), HttpPipelinePosition.PerCall); + } + + return Recording.InstrumentClientOptions(options); + } + + public async Task GetTestContainerAsync( + BlobServiceClient service = default, + string containerName = default, + IDictionary metadata = default, + PublicAccessType? publicAccessType = default, + bool premium = default) + { + + containerName ??= GetNewContainerName(); + service ??= GetServiceClient_SharedKey(); + + if (publicAccessType == default) + { + publicAccessType = premium ? 
PublicAccessType.None : PublicAccessType.BlobContainer; + } + + BlobContainerClient container = InstrumentClient(service.GetBlobContainerClient(containerName)); + await container.CreateAsync(metadata: metadata, publicAccessType: publicAccessType.Value); + return new DisposingContainer(container); + } + + public class DisposingContainer : IAsyncDisposable + { + public BlobContainerClient Container; + + public DisposingContainer(BlobContainerClient client) + { + Container = client; + } + + public async ValueTask DisposeAsync() + { + if (Container != null) + { + try + { + await Container.DeleteAsync(); + Container = null; + } + catch + { + // swallow the exception to avoid hiding another test failure + } + } + } + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs new file mode 100644 index 0000000000000..ca57cd2895650 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Storage.Blobs; +using Azure.Storage.Blobs.Models; +using Moq; +using NUnit.Framework; + +namespace Azure.Storage.Blobs.ChangeFeed.Tests +{ + public class ChangeFeedTests : ChangeFeedTestBase + { + public ChangeFeedTests(bool async) + : base(async, null /* RecordedTestMode.Record /* to re-record */) + { + } + + [Test] + public async Task GetYearPathsTest() + { + // Arrange + Mock containerClient = new Mock(); + ChangeFeed changeFeed = new ChangeFeed(containerClient.Object); + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = + PageResponseEnumerator.CreateEnumerable(GetYearPathFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(pageable); + } + + // Act + Queue years = await changeFeed.GetYearPaths(IsAsync).ConfigureAwait(false); + + // Assert + Queue expectedYears = new Queue(); + expectedYears.Enqueue("idx/segments/2019/"); + expectedYears.Enqueue("idx/segments/2020/"); + expectedYears.Enqueue("idx/segments/2022/"); + expectedYears.Enqueue("idx/segments/2023/"); + Assert.AreEqual(expectedYears, years); + + } + + private static Task> GetYearsPathFuncAsync(string continuation, int? pageSizeHint) + => Task.FromResult(GetYearPathFunc(continuation, pageSizeHint)); + + private static Page GetYearPathFunc( + string continuation, + int? 
pageSizeHint) + => new BlobHierarchyItemPage(new List + { + BlobsModelFactory.BlobHierarchyItem("idx/segments/1601/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2019/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2020/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2022/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2023/", null), + }); + + [Test] + public async Task GetSegmentsInYearTest() + { + // Arrange + Mock containerClient = new Mock(); + ChangeFeed changeFeed = new ChangeFeed(containerClient.Object); + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYearFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + "idx/segments/2020/", + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = + PageResponseEnumerator.CreateEnumerable(GetSegmentsInYearFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + default, + "idx/segments/2020/", + default)).Returns(pageable); + } + + // Act + Queue segmentPaths = await changeFeed.GetSegmentsInYear( + IsAsync, + "idx/segments/2020/", + startTime: new DateTimeOffset(2020, 3, 3, 0, 0, 0, TimeSpan.Zero), + endTime: new DateTimeOffset(2020, 3, 3, 22, 0 , 0, TimeSpan.Zero)); + + // Assert + Queue expectedSegmentPaths = new Queue(); + expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/0000/meta.json"); + expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/1800/meta.json"); + expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/2000/meta.json"); + expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/2200/meta.json"); + + Assert.AreEqual(expectedSegmentPaths, segmentPaths); + } + + private static Task> GetSegmentsInYearFuncAsync( + string continuation, + int? 
pageSizeHint) + => Task.FromResult(GetSegmentsInYearFunc(continuation, pageSizeHint)); + + private static Page GetSegmentsInYearFunc( + string continuation, + int? pageSizeHint) + => new BlobHierarchyItemPage(new List + { + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/01/16/2300/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/02/2300/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/0000/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/1800/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2000/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2200/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/05/1700/meta.json", false, null)), + }); + + private class BlobHierarchyItemPage : Page + { + private List _items; + + public BlobHierarchyItemPage(List items) + { + _items = items; + } + + public override IReadOnlyList Values => _items; + + public override string ContinuationToken => null; + + public override Response GetRawResponse() + { + throw new NotImplementedException(); + } + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/LazyLoadingBlobStreamTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/LazyLoadingBlobStreamTests.cs new file mode 100644 index 0000000000000..b3423a65fe220 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/LazyLoadingBlobStreamTests.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; +using System.Threading.Tasks; +using Azure.Storage.Test; +using NUnit.Framework; + +namespace Azure.Storage.Blobs.ChangeFeed.Tests +{ + public class LazyLoadingBlobStreamTests : ChangeFeedTestBase + { + public LazyLoadingBlobStreamTests(bool async) + : base(async, null /* RecordedTestMode.Record /* to re-record */) + { + } + + [Test] + public async Task ReadAsync() + { + // Arrange + await using DisposingContainer test = await GetTestContainerAsync(); + + // Arrange + int length = Constants.KB; + byte[] exectedData = GetRandomBuffer(length); + BlobClient blobClient = InstrumentClient(test.Container.GetBlobClient(GetNewBlobName())); + using (var stream = new MemoryStream(exectedData)) + { + await blobClient.UploadAsync(stream); + } + LazyLoadingBlobStream lazyStream = new LazyLoadingBlobStream(blobClient, offset: 0, blockSize: 157); + byte[] actualData = new byte[length]; + int offset = 0; + + // Act + int count = 0; + while (offset + count < length) + { + for (count = 6; count < 37; count += 6) + { + await lazyStream.ReadAsync(actualData, offset, count); + offset += count; + } + } + await lazyStream.ReadAsync(actualData, offset, length - offset); + + // Assert + TestHelper.AssertSequenceEqual(exectedData, actualData); + } + + [Test] + public async Task ReadAsync_InvalidParameterTests() + { + // Arrange + BlobClient blobClient = new BlobClient(new Uri("https://www.doesntmatter.com")); + LazyLoadingBlobStream lazyStream = new LazyLoadingBlobStream(blobClient, offset: 0, blockSize: Constants.KB); + + // Act + await TestHelper.AssertExpectedExceptionAsync( + lazyStream.ReadAsync(buffer: null, offset: 0, count: 10), + e => Assert.AreEqual($"buffer cannot be null.{Environment.NewLine}Parameter name: buffer", e.Message)); + + await TestHelper.AssertExpectedExceptionAsync( + lazyStream.ReadAsync(buffer: new byte[10], offset: -1, count: 10), + e => Assert.AreEqual( + $"Specified 
argument was out of the range of valid values.{Environment.NewLine}Parameter name: offset cannot be less than 0.", + e.Message)); + + await TestHelper.AssertExpectedExceptionAsync( + lazyStream.ReadAsync(buffer: new byte[10], offset: 11, count: 10), + e => Assert.AreEqual( + $"Specified argument was out of the range of valid values.{Environment.NewLine}Parameter name: offset cannot exceed buffer length.", + e.Message)); + + await TestHelper.AssertExpectedExceptionAsync( + lazyStream.ReadAsync(buffer: new byte[10], offset: 1, count: -1), + e => Assert.AreEqual( + $"Specified argument was out of the range of valid values.{Environment.NewLine}Parameter name: count cannot be less than 0.", + e.Message)); + + await TestHelper.AssertExpectedExceptionAsync( + lazyStream.ReadAsync(buffer: new byte[10], offset: 5, count: 15), + e => Assert.AreEqual( + $"Specified argument was out of the range of valid values.{Environment.NewLine}Parameter name: offset + count cannot exceed buffer length.", + e.Message)); + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs b/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs index c20ffa0125545..a70af2f95ef7c 100644 --- a/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs +++ b/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs @@ -300,6 +300,19 @@ internal static class Queue public const string UriSubDomain = "queue"; } + /// + /// ChangeFeed constant values. + /// + internal static class ChangeFeed + { + public const string ChangeFeedContainerName = "$blobchangefeed"; + public const string SegmentPrefix = "idx/segments/"; + public const string InitalizationManifestPath = "/0000/"; + public const string InitalizationSegment = "1601"; + public const string MetaSegmentsPath = "meta/segments.json"; + public const long ChunkBlockDownloadSize = MB; + } + /// /// Sas constant values. 
/// diff --git a/sdk/storage/Azure.Storage.sln b/sdk/storage/Azure.Storage.sln index bd5413ab22aab..457049afc1c66 100644 --- a/sdk/storage/Azure.Storage.sln +++ b/sdk/storage/Azure.Storage.sln @@ -119,6 +119,12 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.Storage.Internal.Avro EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.Storage.Internal.Avro.Tests", "Azure.Storage.Internal.Avro\tests\Azure.Storage.Internal.Avro.Tests.csproj", "{A7FEC0AC-9A90-4F12-A260-B0B63E57D9DA}" EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.Storage.Blobs.ChangeFeed", "Azure.Storage.Blobs.ChangeFeed\src\Azure.Storage.Blobs.ChangeFeed.csproj", "{5EA89BEF-6367-41DD-A10F-246E0D3FDA55}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.Storage.Blobs.ChangeFeed.Tests", "Azure.Storage.Blobs.ChangeFeed\tests\Azure.Storage.Blobs.ChangeFeed.Tests.csproj", "{0780564C-4096-45B4-8DEF-132EE7CB2CF8}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.Storage.Blobs.ChangeFeed.Samples.Tests", "Azure.Storage.Blobs.ChangeFeed\samples\Azure.Storage.Blobs.ChangeFeed.Samples.Tests.csproj", "{5F7C7873-0E11-468C-8045-5163B068FC16}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -221,6 +227,18 @@ Global {A7FEC0AC-9A90-4F12-A260-B0B63E57D9DA}.Debug|Any CPU.Build.0 = Debug|Any CPU {A7FEC0AC-9A90-4F12-A260-B0B63E57D9DA}.Release|Any CPU.ActiveCfg = Release|Any CPU {A7FEC0AC-9A90-4F12-A260-B0B63E57D9DA}.Release|Any CPU.Build.0 = Release|Any CPU + {5EA89BEF-6367-41DD-A10F-246E0D3FDA55}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5EA89BEF-6367-41DD-A10F-246E0D3FDA55}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5EA89BEF-6367-41DD-A10F-246E0D3FDA55}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5EA89BEF-6367-41DD-A10F-246E0D3FDA55}.Release|Any CPU.Build.0 = Release|Any CPU + {0780564C-4096-45B4-8DEF-132EE7CB2CF8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{0780564C-4096-45B4-8DEF-132EE7CB2CF8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0780564C-4096-45B4-8DEF-132EE7CB2CF8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0780564C-4096-45B4-8DEF-132EE7CB2CF8}.Release|Any CPU.Build.0 = Release|Any CPU + {5F7C7873-0E11-468C-8045-5163B068FC16}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5F7C7873-0E11-468C-8045-5163B068FC16}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5F7C7873-0E11-468C-8045-5163B068FC16}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5F7C7873-0E11-468C-8045-5163B068FC16}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE From 821c4339d6c1b7edd4ce427938c4a831b3f4d702 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Thu, 30 Apr 2020 15:29:44 -0700 Subject: [PATCH 02/30] Added Chunk unit tests --- .../src/Chunk.cs | 31 ++-- .../src/Models/BlobChangeFeedEvent.cs | 18 +- .../src/Models/BlobChangeFeedEventData.cs | 34 ++-- .../tests/BlobChangeFeedAsyncPagableTests.cs | 5 +- .../tests/ChunkTests.cs | 161 ++++++++++++++++++ .../src/Shared/Constants.cs | 32 ++++ .../src/Shared/StorageVersionExtensions.cs | 2 +- .../src/AvroReader.cs | 13 +- 8 files changed, 251 insertions(+), 45 deletions(-) create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs index 6a4bde1822b97..27c48092d905d 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs @@ -23,29 +23,28 @@ internal class Chunk : IDisposable /// /// Avro Reader to parser the Events. /// - private AvroReader _avroReader; + private readonly AvroReader _avroReader; /// - /// The byte offset of the beginning of the current - /// Block. + /// Data stream. 
/// - public long BlockOffset { get; private set; } + private readonly Stream _dataStream; /// - /// The index of the Event within the current block. + /// Avro head stream. /// - public long EventIndex { get; private set; } - + private readonly Stream _headStream; /// - /// Data stream. + /// The byte offset of the beginning of the current + /// Block. /// - private LazyLoadingBlobStream _dataStream; + public long BlockOffset { get; private set; } /// - /// Avro head stream. + /// The index of the Event within the current block. /// - private Stream _headStream; + public long EventIndex { get; private set; } public Chunk( BlobContainerClient containerClient, @@ -82,6 +81,14 @@ public Chunk( } } + /// + /// Constructor for testing. Do not use. + /// + internal Chunk(AvroReader avroReader) + { + _avroReader = avroReader; + } + //TODO what if the Segment isn't Finalized?? public bool HasNext() => _avroReader.HasNext(); @@ -108,4 +115,4 @@ public void Dispose() GC.SuppressFinalize(this); } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs index ddf5eee4c4fe0..ea0f595e30258 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEvent.cs @@ -19,15 +19,15 @@ public class BlobChangeFeedEvent /// internal BlobChangeFeedEvent(Dictionary record) { - Topic = (string)record["topic"]; - Subject = (string)record["subject"]; - EventType = ToBlobChangeFeedEventType((string)record["eventType"]); - EventTime = DateTimeOffset.Parse((string)record["eventTime"], CultureInfo.InvariantCulture); - Id = Guid.Parse((string)record["id"]); - EventData = new BlobChangeFeedEventData((Dictionary)record["data"]); - record.TryGetValue("dataVersion", out object dataVersion); + Topic = (string)record[Constants.ChangeFeed.Event.Topic]; + 
Subject = (string)record[Constants.ChangeFeed.Event.Subject]; + EventType = ToBlobChangeFeedEventType((string)record[Constants.ChangeFeed.Event.EventType]); + EventTime = DateTimeOffset.Parse((string)record[Constants.ChangeFeed.Event.EventTime], CultureInfo.InvariantCulture); + Id = Guid.Parse((string)record[Constants.ChangeFeed.Event.EventId]); + EventData = new BlobChangeFeedEventData((Dictionary)record[Constants.ChangeFeed.Event.Data]); + record.TryGetValue(Constants.ChangeFeed.Event.DataVersion, out object dataVersion); DataVersion = (long?)dataVersion; - record.TryGetValue("metadataVersion", out object metadataVersion); + record.TryGetValue(Constants.ChangeFeed.Event.MetadataVersion, out object metadataVersion); MetadataVersion = (string)metadataVersion; } @@ -90,4 +90,4 @@ private static BlobChangeFeedEventType ToBlobChangeFeedEventType(string s) } } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs index 5786b5d63c336..8edb9b1a2d4d0 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventData.cs @@ -19,30 +19,30 @@ internal BlobChangeFeedEventData() { } internal BlobChangeFeedEventData(Dictionary record) { - Api = ((string)record["api"]); - ClientRequestId = Guid.Parse((string)record["clientRequestId"]); - RequestId = Guid.Parse((string)record["requestId"]); - ETag = new ETag((string)record["etag"]); - ContentType = (string)record["contentType"]; - ContentLength = (long)record["contentLength"]; - BlobType = ((string)record["blobType"]) switch + Api = ((string)record[Constants.ChangeFeed.EventData.Api]); + ClientRequestId = Guid.Parse((string)record[Constants.ChangeFeed.EventData.ClientRequestId]); + RequestId = Guid.Parse((string)record[Constants.ChangeFeed.EventData.RequestId]); + 
ETag = new ETag((string)record[Constants.ChangeFeed.EventData.Etag]); + ContentType = (string)record[Constants.ChangeFeed.EventData.ContentType]; + ContentLength = (long)record[Constants.ChangeFeed.EventData.ContentLength]; + BlobType = ((string)record[Constants.ChangeFeed.EventData.BlobType]) switch { - "BlockBlob" => BlobType.Block, - "PageBlob" => BlobType.Page, - "AppendBlob" => BlobType.Append, + Constants.ChangeFeed.EventData.BlockBlob => BlobType.Block, + Constants.ChangeFeed.EventData.PageBlob => BlobType.Page, + Constants.ChangeFeed.EventData.AppendBlob => BlobType.Append, _ => default }; - record.TryGetValue("contentOffset", out object contentOffset); + record.TryGetValue(Constants.ChangeFeed.EventData.ContentOffset, out object contentOffset); ContentOffset = (long?)contentOffset; - record.TryGetValue("destinationUrl", out object destinationUrl); + record.TryGetValue(Constants.ChangeFeed.EventData.DestinationUrl, out object destinationUrl); DestinationUri = !string.IsNullOrEmpty((string)destinationUrl) ? new Uri((string)destinationUrl) : null; - record.TryGetValue("sourceUrl", out object sourceUrl); + record.TryGetValue(Constants.ChangeFeed.EventData.SourceUrl, out object sourceUrl); SourceUri = !string.IsNullOrEmpty((string)sourceUrl) ? new Uri((string)sourceUrl) : null; - record.TryGetValue("url", out object url); + record.TryGetValue(Constants.ChangeFeed.EventData.Url, out object url); Uri = !string.IsNullOrEmpty((string)url) ? 
new Uri((string)url) : null; - record.TryGetValue("recursive", out object recursive); + record.TryGetValue(Constants.ChangeFeed.EventData.Recursive, out object recursive); Recursive = (bool?)recursive; - Sequencer = (string)record["sequencer"]; + Sequencer = (string)record[Constants.ChangeFeed.EventData.Sequencer]; } /// @@ -126,4 +126,4 @@ internal BlobChangeFeedEventData(Dictionary record) /// public string Sequencer { get; internal set; } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs index 9955c4fcf772c..ba1f3801fbec0 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs @@ -5,6 +5,7 @@ using System.Collections.Generic; using System.Text; using System.Threading.Tasks; +using Azure.Core.Testing; using Azure.Storage.Blobs; using Azure.Storage.Blobs.ChangeFeed.Models; using NUnit.Framework; @@ -14,7 +15,7 @@ namespace Azure.Storage.Blobs.ChangeFeed.Tests public class BlobChangeFeedAsyncPagableTests : ChangeFeedTestBase { public BlobChangeFeedAsyncPagableTests(bool async) - : base(async, null /* RecordedTestMode.Record /* to re-record */) + : base(async, RecordedTestMode.Live /* RecordedTestMode.Record /* to re-record */) { } @@ -23,7 +24,7 @@ public BlobChangeFeedAsyncPagableTests(bool async) //TODO page size tests [Test] - [Ignore("")] + //[Ignore("")] public async Task Test() { BlobServiceClient service = GetServiceClient_SharedKey(); diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs new file mode 100644 index 0000000000000..1892a04fcacb4 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs @@ -0,0 +1,161 @@ +// Copyright (c) Microsoft 
Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Azure.Storage.Blobs.ChangeFeed.Models; +using Azure.Storage.Blobs.Models; +using Azure.Storage.Internal.Avro; +using Moq; +using NUnit.Framework; + +namespace Azure.Storage.Blobs.ChangeFeed.Tests +{ + public class ChunkTests : ChangeFeedTestBase + { + public ChunkTests(bool async) + : base(async, null /* RecordedTestMode.Record /* to re-record */) + { + } + + [Test] + public void HasNext_True() + { + // Arrange + Mock avroReader = new Mock(MockBehavior.Strict); + avroReader.Setup(r => r.HasNext()).Returns(true); + Chunk chunk = new Chunk(avroReader.Object); + + // Act + bool hasNext = chunk.HasNext(); + + // Assert + Assert.IsTrue(hasNext); + avroReader.Verify(r => r.HasNext()); + } + + [Test] + public void HasNext_False() + { + // Arrange + Mock avroReader = new Mock(MockBehavior.Strict); + avroReader.Setup(r => r.HasNext()).Returns(false); + Chunk chunk = new Chunk(avroReader.Object); + + // Act + bool hasNext = chunk.HasNext(); + + // Assert + Assert.IsFalse(hasNext); + avroReader.Verify(r => r.HasNext()); + } + + [Test] + public async Task Next() + { + // Arrange + long blockOffset = 5; + long objectIndex = 10; + + string topic = "topic"; + string subject = "subject"; + string eventType = "BlobCreated"; + DateTimeOffset eventTime = new DateTimeOffset(2020, 4, 30, 8, 26, 30, TimeSpan.Zero); + Guid eventId = Guid.NewGuid(); + long dataVersion = 1; + string metadataVersion = "1"; + + string api = "CreateBlob"; + Guid clientRequestId = Guid.NewGuid(); + Guid requestId = Guid.NewGuid(); + ETag etag = new ETag("0x8D75EF45A3B8617"); + string contentType = "contentType"; + long contentLength = Constants.KB; + string blobType = "BlockBlob"; + long contentOffset = 5; + Uri destinationUri = new Uri("https://www.destination.com"); + Uri sourceUri = new 
Uri("https://www.source.com"); + Uri uri = new Uri("https://www.uri.com"); + bool recursive = true; + string sequencer = "sequencer"; + + Dictionary record = new Dictionary + { + { Constants.ChangeFeed.Event.Topic, topic }, + { Constants.ChangeFeed.Event.Subject, subject }, + { Constants.ChangeFeed.Event.EventType, eventType }, + { Constants.ChangeFeed.Event.EventTime, eventTime.ToString() }, + { Constants.ChangeFeed.Event.EventId, eventId.ToString() }, + { Constants.ChangeFeed.Event.DataVersion, dataVersion }, + { Constants.ChangeFeed.Event.MetadataVersion, metadataVersion }, + { Constants.ChangeFeed.Event.Data, new Dictionary + { + { Constants.ChangeFeed.EventData.Api, api }, + { Constants.ChangeFeed.EventData.ClientRequestId, clientRequestId.ToString() }, + { Constants.ChangeFeed.EventData.RequestId, requestId.ToString() }, + { Constants.ChangeFeed.EventData.Etag, etag.ToString() }, + { Constants.ChangeFeed.EventData.ContentType, contentType }, + { Constants.ChangeFeed.EventData.ContentLength, contentLength }, + { Constants.ChangeFeed.EventData.BlobType, blobType }, + { Constants.ChangeFeed.EventData.ContentOffset, contentOffset }, + { Constants.ChangeFeed.EventData.DestinationUrl, destinationUri.ToString() }, + { Constants.ChangeFeed.EventData.SourceUrl, sourceUri.ToString() }, + { Constants.ChangeFeed.EventData.Url, uri.ToString() }, + { Constants.ChangeFeed.EventData.Recursive, recursive }, + { Constants.ChangeFeed.EventData.Sequencer, sequencer } + } + } + }; + + Mock avroReader = new Mock(MockBehavior.Strict); + + avroReader.Setup(r => r.HasNext()).Returns(true); + + avroReader.Setup(r => r.Next( + It.IsAny(), + It.IsAny())) + .ReturnsAsync(record); + + avroReader.Setup(r => r.BlockOffset).Returns(blockOffset); + avroReader.Setup(r => r.ObjectIndex).Returns(objectIndex); + + Chunk chunk = new Chunk(avroReader.Object); + + // Act + BlobChangeFeedEvent changeFeedEvent = await chunk.Next(IsAsync); + + // Assert + Assert.AreEqual(topic, changeFeedEvent.Topic); 
+ Assert.AreEqual(subject, changeFeedEvent.Subject); + Assert.AreEqual(BlobChangeFeedEventType.BlobCreated, changeFeedEvent.EventType); + Assert.AreEqual(eventTime, changeFeedEvent.EventTime); + Assert.AreEqual(eventId, changeFeedEvent.Id); + Assert.AreEqual(dataVersion, changeFeedEvent.DataVersion); + Assert.AreEqual(metadataVersion, changeFeedEvent.MetadataVersion); + + Assert.AreEqual(api, changeFeedEvent.EventData.Api); + Assert.AreEqual(clientRequestId, changeFeedEvent.EventData.ClientRequestId); + Assert.AreEqual(requestId, changeFeedEvent.EventData.RequestId); + Assert.AreEqual(etag, changeFeedEvent.EventData.ETag); + Assert.AreEqual(contentType, changeFeedEvent.EventData.ContentType); + Assert.AreEqual(contentLength, changeFeedEvent.EventData.ContentLength); + Assert.AreEqual(BlobType.Block, changeFeedEvent.EventData.BlobType); + Assert.AreEqual(contentOffset, changeFeedEvent.EventData.ContentOffset); + Assert.AreEqual(destinationUri, changeFeedEvent.EventData.DestinationUri); + Assert.AreEqual(sourceUri, changeFeedEvent.EventData.SourceUri); + Assert.AreEqual(uri, changeFeedEvent.EventData.Uri); + Assert.AreEqual(recursive, changeFeedEvent.EventData.Recursive); + Assert.AreEqual(sequencer, changeFeedEvent.EventData.Sequencer); + + avroReader.Verify(r => r.HasNext()); + avroReader.Verify(r => r.Next( + IsAsync, + default)); + avroReader.Verify(r => r.BlockOffset); + avroReader.Verify(r => r.ObjectIndex); + } + } +} diff --git a/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs b/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs index a70af2f95ef7c..1744fea65581b 100644 --- a/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs +++ b/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs @@ -311,6 +311,38 @@ internal static class ChangeFeed public const string InitalizationSegment = "1601"; public const string MetaSegmentsPath = "meta/segments.json"; public const long ChunkBlockDownloadSize = MB; + + internal static class Event + { + 
public const string Topic = "topic"; + public const string Subject = "subject"; + public const string EventType = "eventType"; + public const string EventTime = "eventTime"; + public const string EventId = "id"; + public const string Data = "data"; + public const string DataVersion = "dataVersion"; + public const string MetadataVersion = "metadataVersion"; + } + + internal static class EventData + { + public const string Api = "api"; + public const string ClientRequestId = "clientRequestId"; + public const string RequestId = "requestId"; + public const string Etag = "etag"; + public const string ContentType = "contentType"; + public const string ContentLength = "contentLength"; + public const string BlobType = "blobType"; + public const string BlockBlob = "BlockBlob"; + public const string PageBlob = "pageBlob"; + public const string AppendBlob = "AppendBlob"; + public const string ContentOffset = "contentOffset"; + public const string DestinationUrl = "destinationUrl"; + public const string SourceUrl = "sourceUrl"; + public const string Url = "url"; + public const string Recursive = "recursive"; + public const string Sequencer = "sequencer"; + } } /// diff --git a/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs b/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs index ff480ae550a65..9de33dc548600 100644 --- a/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs +++ b/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs @@ -51,7 +51,7 @@ public static string ToVersionString(this ServiceVersion version) => ServiceVersion.V2019_02_02 => "2019-02-02", ServiceVersion.V2019_07_07 => "2019-07-07", // TODO this is temporary until 73 goes to stage. 
- ServiceVersion.V2019_12_12 => "2019-12-12", + ServiceVersion.V2019_12_12 => "2019-10-10", #elif QueueSDK // Queues just bumped the version number without changing the swagger ServiceVersion.V2019_02_02 => "2018-11-09", diff --git a/sdk/storage/Azure.Storage.Internal.Avro/src/AvroReader.cs b/sdk/storage/Azure.Storage.Internal.Avro/src/AvroReader.cs index 089cb0c264276..e180a2066017c 100644 --- a/sdk/storage/Azure.Storage.Internal.Avro/src/AvroReader.cs +++ b/sdk/storage/Azure.Storage.Internal.Avro/src/AvroReader.cs @@ -47,13 +47,13 @@ internal class AvroReader /// The byte offset within the Avro file (both header and data) /// of the start of the current block. /// - public long BlockOffset { get; private set; } + public virtual long BlockOffset { get; private set; } /// /// The index of the current object within the current block. /// /// - public long ObjectIndex { get; private set; } + public virtual long ObjectIndex { get; private set; } /// /// If this Avro Reader has been initalized. @@ -91,6 +91,11 @@ public AvroReader( _initalized = false; } + /// + /// Constructor for mocking. Do not use. + /// + public AvroReader() { } + private async Task Initalize(bool async, CancellationToken cancellationToken = default) { // Four bytes, ASCII 'O', 'b', 'j', followed by 1. @@ -141,9 +146,9 @@ private async Task Initalize(bool async, CancellationToken cancellationToken = d } } - public bool HasNext() => !_initalized || _itemsRemainingInBlock > 0; + public virtual bool HasNext() => !_initalized || _itemsRemainingInBlock > 0; - public async Task Next(bool async, CancellationToken cancellationToken = default) + public virtual async Task Next(bool async, CancellationToken cancellationToken = default) { // Initialize AvroReader, if necessary. 
if (!_initalized) From e1e48b8a6723927049d68929097a65508e402938 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Thu, 30 Apr 2020 16:40:42 -0700 Subject: [PATCH 03/30] Added Shard tests --- .../src/Chunk.cs | 29 +-- .../src/Shard.cs | 29 ++- .../tests/ChangeFeedTestBase.cs | 21 ++- .../tests/ChangeFeedTests.cs | 21 +-- .../tests/ShardTests.cs | 165 ++++++++++++++++++ 5 files changed, 227 insertions(+), 38 deletions(-) create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs index 27c48092d905d..1024446122383 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs @@ -39,12 +39,12 @@ internal class Chunk : IDisposable /// The byte offset of the beginning of the current /// Block. /// - public long BlockOffset { get; private set; } + public virtual long BlockOffset { get; private set; } /// /// The index of the Event within the current block. /// - public long EventIndex { get; private set; } + public virtual long EventIndex { get; private set; } public Chunk( BlobContainerClient containerClient, @@ -81,19 +81,11 @@ public Chunk( } } - /// - /// Constructor for testing. Do not use. - /// - internal Chunk(AvroReader avroReader) - { - _avroReader = avroReader; - } - //TODO what if the Segment isn't Finalized?? - public bool HasNext() + public virtual bool HasNext() => _avroReader.HasNext(); - public async Task Next(bool async) + public virtual async Task Next(bool async) { Dictionary result; @@ -114,5 +106,18 @@ public void Dispose() _headStream.Dispose(); GC.SuppressFinalize(this); } + + /// + /// Constructor for testing. Do not use. + /// + internal Chunk(AvroReader avroReader) + { + _avroReader = avroReader; + } + + /// + /// Constructor for mocking. Do not use. 
+ /// + internal Chunk() { } } } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs index 41d583d6f52e7..cd3b8b119bd68 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs @@ -38,15 +38,17 @@ internal class Shard : IDisposable /// /// The byte offset of the beginning of the - /// current Avro block. + /// current Avro block. Only used to initalize a + /// Shard from a Sursor. /// - private long _blockOffset; + private readonly long _blockOffset; /// /// Index of the current event within the - /// Avro block. + /// Avro block. Only used to initalize a + /// Shard from a Sursor. /// - private long _eventIndex; + private readonly long _eventIndex; /// /// If this Shard has been initalized. @@ -156,5 +158,22 @@ public async Task Next(bool async) /// public void Dispose() => _currentChunk.Dispose(); + + /// + /// Constructor for testing. Do not use. 
+ /// + internal Shard( + Chunk chunk = default, + long chunkIndex = default, + bool isInitalized = default, + Queue chunks = default, + BlobContainerClient containerClient = default) + { + _currentChunk = chunk; + _chunkIndex = chunkIndex; + _isInitialized = isInitalized; + _chunks = chunks; + _containerClient = containerClient; + } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs index c5e76693797f0..ebccf9de1b710 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs @@ -102,5 +102,24 @@ public async ValueTask DisposeAsync() } } } + + public class BlobHierarchyItemPage : Page + { + private List _items; + + public BlobHierarchyItemPage(List items) + { + _items = items; + } + + public override IReadOnlyList Values => _items; + + public override string ContinuationToken => null; + + public override Response GetRawResponse() + { + throw new NotImplementedException(); + } + } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs index ca57cd2895650..8494d5ed8bcf3 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs @@ -159,24 +159,5 @@ private static Page GetSegmentsInYearFunc( null, BlobsModelFactory.BlobItem("idx/segments/2020/03/05/1700/meta.json", false, null)), }); - - private class BlobHierarchyItemPage : Page - { - private List _items; - - public BlobHierarchyItemPage(List items) - { - _items = items; - } - - public override IReadOnlyList Values => _items; - - public override string ContinuationToken => null; - - public override Response GetRawResponse() - { - throw new 
NotImplementedException(); - } - } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs new file mode 100644 index 0000000000000..ead10c4cb651e --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs @@ -0,0 +1,165 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; +using Azure.Storage.Blobs.ChangeFeed.Models; +using Moq; +using NUnit.Framework; +using System.Threading.Tasks; +using Azure.Storage.Blobs.Models; +using Azure.Core; + +namespace Azure.Storage.Blobs.ChangeFeed.Tests +{ + public class ShardTests : ChangeFeedTestBase + { + public ShardTests(bool async) + : base(async, null /* RecordedTestMode.Record /* to re-record */) + { + } + + [Test] + public void GetCursor() + { + // Arrange + long chunkIndex = 5; + long blockOffset = 100; + long eventIndex = 200; + + Mock chunk = new Mock(MockBehavior.Strict); + + chunk.Setup(r => r.BlockOffset).Returns(blockOffset); + chunk.Setup(r => r.EventIndex).Returns(eventIndex); + + Shard shard = new Shard(chunk.Object, chunkIndex); + + // Act + BlobChangeFeedShardCursor cursor = shard.GetCursor(); + + // Assert + Assert.AreEqual(chunkIndex, cursor.ChunkIndex); + Assert.AreEqual(blockOffset, cursor.BlockOffset); + Assert.AreEqual(eventIndex, cursor.EventIndex); + + chunk.Verify(r => r.BlockOffset); + chunk.Verify(r => r.EventIndex); + } + + [Test] + public void HasNext_NotInitalizes() + { + // Arrange + Shard shard = new Shard(isInitalized: false); + + // Act + bool hasNext = shard.HasNext(); + + // Assert + Assert.IsTrue(hasNext); + } + + [Test] + public void HasNext_False() + { + // Arrange + Mock chunk = new Mock(MockBehavior.Strict); + chunk.Setup(r => r.HasNext()).Returns(false); + + Queue chunks = new Queue(); + Shard shard = new Shard( + 
chunk.Object, + isInitalized: true, + chunks: chunks); + + // Act + bool hasNext = shard.HasNext(); + + // Assert + Assert.IsFalse(hasNext); + + chunk.Verify(r => r.HasNext()); + } + + [Test] + public void HasNext_ChunksLeft() + { + // Arrange + Queue chunks = new Queue(); + chunks.Enqueue("chunk"); + Shard shard = new Shard( + isInitalized: true, + chunks: chunks); + + // Act + bool hasNext = shard.HasNext(); + + // Assert + Assert.IsTrue(hasNext); + } + + [Test] + public void HasNext_CurrentChunkHasNext() + { + // Arrange + Mock chunk = new Mock(MockBehavior.Strict); + chunk.Setup(r => r.HasNext()).Returns(true); + + Shard shard = new Shard( + chunk: chunk.Object, + isInitalized: true, + chunks: new Queue()); + + // Act + bool hasNext = shard.HasNext(); + + // Assert + Assert.IsTrue(hasNext); + + chunk.Verify(r => r.HasNext()); + } + + [Test] + public async Task Next() + { + // Arrange + Guid eventId = Guid.NewGuid(); + BlobChangeFeedEvent expectedChangeFeedEvent = new BlobChangeFeedEvent + { + Id = eventId + }; + string secondChunkName = "chunk"; + + Mock chunk = new Mock(MockBehavior.Strict); + chunk.Setup(r => r.HasNext()).Returns(true); + chunk.Setup(r => r.Next(It.IsAny())).Returns(Task.FromResult(expectedChangeFeedEvent)); + chunk.Setup(r => r.HasNext()).Returns(false); + + Mock containerClient = new Mock(MockBehavior.Strict); + containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns((new Mock()).Object); + + Queue chunks = new Queue(); + chunks.Enqueue(secondChunkName); + Shard shard = new Shard( + chunk: chunk.Object, + isInitalized: true, + chunks: chunks, + containerClient: containerClient.Object); + + // Act + BlobChangeFeedEvent changeFeedEvent = await shard.Next(IsAsync); + BlobChangeFeedShardCursor cursor = shard.GetCursor(); + + // Assert + Assert.AreEqual(eventId, changeFeedEvent.Id); + Assert.AreEqual(1, cursor.ChunkIndex); + + chunk.Verify(r => r.HasNext()); + chunk.Verify(r => r.Next(IsAsync)); + chunk.Verify(r => r.HasNext()); + + 
containerClient.Verify(r => r.GetBlobClient(secondChunkName)); + } + } +} From a83820458d392b6813be7a7456682ee4f1eab584 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Thu, 30 Apr 2020 17:34:04 -0700 Subject: [PATCH 04/30] Added some Segment tests --- .../src/ChangeFeed.cs | 12 +-- ...hangeFeedCursor.cs => ChangeFeedCursor.cs} | 12 +-- ...eFeedSegmentCursor.cs => SegmentCursor.cs} | 12 +-- ...hangeFeedShardCursor.cs => ShardCursor.cs} | 8 +- .../src/Segment.cs | 25 +++++-- .../src/Shard.cs | 15 ++-- .../tests/SegmentTests.cs | 73 +++++++++++++++++++ .../tests/ShardTests.cs | 4 +- 8 files changed, 127 insertions(+), 34 deletions(-) rename sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/{BlobChangeFeedCursor.cs => ChangeFeedCursor.cs} (79%) rename sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/{BlobChangeFeedSegmentCursor.cs => SegmentCursor.cs} (75%) rename sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/{BlobChangeFeedShardCursor.cs => ShardCursor.cs} (87%) create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs index 273dad09de74f..850784488bb86 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs @@ -36,7 +36,7 @@ internal class ChangeFeed /// private Segment _currentSegment; - private readonly BlobChangeFeedSegmentCursor _currentSegmentCursor; + private readonly SegmentCursor _currentSegmentCursor; /// /// The latest time the Change Feed can safely be read from. 
@@ -80,7 +80,7 @@ public ChangeFeed( string continutation) { _containerClient = blobServiceClient.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName); - BlobChangeFeedCursor cursor = JsonSerializer.Deserialize(continutation); + ChangeFeedCursor cursor = JsonSerializer.Deserialize(continutation); ValidateCursor(_containerClient, cursor); _years = new Queue(); _segments = new Queue(); @@ -222,7 +222,7 @@ public async Task> GetPage( await AdvanceSegmentIfNecessary(async).ConfigureAwait(false); } - return new BlobChangeFeedEventPage(blobChangeFeedEvents, JsonSerializer.Serialize(GetCursor())); + return new BlobChangeFeedEventPage(blobChangeFeedEvents, JsonSerializer.Serialize(GetCursor())); } @@ -251,8 +251,8 @@ public DateTimeOffset LastConsumable() return _lastConsumable; } - internal BlobChangeFeedCursor GetCursor() - => new BlobChangeFeedCursor( + internal ChangeFeedCursor GetCursor() + => new ChangeFeedCursor( urlHash: _containerClient.Uri.ToString().GetHashCode(), endDateTime: _endTime, currentSegmentCursor: _currentSegment.GetCursor()); @@ -374,7 +374,7 @@ private static DateTimeOffset MinDateTime(DateTimeOffset lastConsumable, DateTim private static void ValidateCursor( BlobContainerClient containerClient, - BlobChangeFeedCursor cursor) + ChangeFeedCursor cursor) { if (containerClient.Uri.ToString().GetHashCode() != cursor.UrlHash) { diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedCursor.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ChangeFeedCursor.cs similarity index 79% rename from sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedCursor.cs rename to sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ChangeFeedCursor.cs index 75e627e5292b5..8036b2427bb36 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedCursor.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ChangeFeedCursor.cs @@ -10,7 +10,7 @@ namespace 
Azure.Storage.Blobs.ChangeFeed.Models /// /// BlobChangeFeedCursor. /// - internal class BlobChangeFeedCursor + internal class ChangeFeedCursor { /// /// CursorVersion. @@ -30,12 +30,12 @@ internal class BlobChangeFeedCursor /// /// The Segment Cursor for the current segment. /// - public BlobChangeFeedSegmentCursor CurrentSegmentCursor { get; set; } + public SegmentCursor CurrentSegmentCursor { get; set; } - internal BlobChangeFeedCursor( + internal ChangeFeedCursor( long urlHash, DateTimeOffset? endDateTime, - BlobChangeFeedSegmentCursor currentSegmentCursor) + SegmentCursor currentSegmentCursor) { CursorVersion = 1; UrlHash = urlHash; @@ -43,6 +43,6 @@ internal BlobChangeFeedCursor( CurrentSegmentCursor = currentSegmentCursor; } - public BlobChangeFeedCursor() { } + public ChangeFeedCursor() { } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedSegmentCursor.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/SegmentCursor.cs similarity index 75% rename from sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedSegmentCursor.cs rename to sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/SegmentCursor.cs index 1df14b52a189b..a732ce776dfe1 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedSegmentCursor.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/SegmentCursor.cs @@ -10,12 +10,12 @@ namespace Azure.Storage.Blobs.ChangeFeed.Models /// /// Segment Cursor. /// - internal class BlobChangeFeedSegmentCursor + internal class SegmentCursor { /// /// Shard Cursors. /// - public List ShardCursors { get; set; } + public List ShardCursors { get; set; } /// /// Index of the current Shard. 
@@ -27,9 +27,9 @@ internal class BlobChangeFeedSegmentCursor /// public DateTimeOffset SegmentTime { get; set; } - internal BlobChangeFeedSegmentCursor( + internal SegmentCursor( DateTimeOffset segmentDateTime, - List shardCursors, + List shardCursors, int shardIndex) { SegmentTime = segmentDateTime; @@ -37,6 +37,6 @@ internal BlobChangeFeedSegmentCursor( ShardIndex = shardIndex; } - public BlobChangeFeedSegmentCursor() { } + public SegmentCursor() { } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedShardCursor.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ShardCursor.cs similarity index 87% rename from sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedShardCursor.cs rename to sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ShardCursor.cs index f5524cd9525cf..b7ff67c17ec21 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedShardCursor.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/ShardCursor.cs @@ -7,7 +7,7 @@ namespace Azure.Storage.Blobs.ChangeFeed.Models { - internal class BlobChangeFeedShardCursor + internal class ShardCursor { /// /// Index of the current Chunk. 
@@ -26,7 +26,7 @@ internal class BlobChangeFeedShardCursor /// public long EventIndex { get; set; } - internal BlobChangeFeedShardCursor( + internal ShardCursor( long chunkIndex, long blockOffset, long eventIndex) @@ -39,6 +39,6 @@ internal BlobChangeFeedShardCursor( /// /// /// - public BlobChangeFeedShardCursor() { } + public ShardCursor() { } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs index 35dca8f1b7c80..d391605a6b709 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs @@ -50,12 +50,12 @@ internal class Segment /// private bool _isInitalized; - private BlobChangeFeedSegmentCursor _cursor; + private SegmentCursor _cursor; public Segment( BlobContainerClient containerClient, string manifestPath, - BlobChangeFeedSegmentCursor cursor = default) + SegmentCursor cursor = default) { _containerClient = containerClient; _manifestPath = manifestPath; @@ -108,14 +108,14 @@ private async Task Initalize(bool async) _isInitalized = true; } - public BlobChangeFeedSegmentCursor GetCursor() + public SegmentCursor GetCursor() { - List shardCursors = new List(); + List shardCursors = new List(); foreach (Shard shard in _shards) { shardCursors.Add(shard.GetCursor()); } - return new BlobChangeFeedSegmentCursor( + return new SegmentCursor( segmentDateTime: DateTime, shardCursors: shardCursors, shardIndex: _shardIndex); @@ -174,5 +174,20 @@ public bool HasNext() return _shards.Count > 0; } + + /// + /// Constructor for testing. Do not use. 
+ /// + internal Segment( + bool isInitalized = default, + List shards = default, + int shardIndex = default, + DateTimeOffset dateTime = default) + { + _isInitalized = isInitalized; + _shards = shards; + _shardIndex = shardIndex; + DateTime = dateTime; + } } } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs index cd3b8b119bd68..9d78cfa317e26 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs @@ -58,7 +58,7 @@ internal class Shard : IDisposable public Shard( BlobContainerClient containerClient, string shardPath, - BlobChangeFeedShardCursor shardCursor = default) + ShardCursor shardCursor = default) { _containerClient = containerClient; _shardPath = shardPath; @@ -115,13 +115,13 @@ private async Task Initalize(bool async) _isInitialized = true; } - public BlobChangeFeedShardCursor GetCursor() - => new BlobChangeFeedShardCursor( + public virtual ShardCursor GetCursor() + => new ShardCursor( _chunkIndex, _currentChunk.BlockOffset, _currentChunk.EventIndex); - public bool HasNext() + public virtual bool HasNext() { if (!_isInitialized) { @@ -131,7 +131,7 @@ public bool HasNext() return _chunks.Count > 0 || _currentChunk.HasNext(); } - public async Task Next(bool async) + public virtual async Task Next(bool async) { if (!_isInitialized) { @@ -175,5 +175,10 @@ internal Shard( _chunks = chunks; _containerClient = containerClient; } + + /// + /// Constructor for mocking. Do not use. + /// + internal Shard() { } } } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs new file mode 100644 index 0000000000000..6f298267e8494 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +using System; +using System.Collections.Generic; +using System.Text; +using Azure.Storage.Blobs.ChangeFeed.Models; +using Moq; +using NUnit.Framework; + +namespace Azure.Storage.Blobs.ChangeFeed.Tests +{ + public class SegmentTests + { + [Test] + public void HasNext_NotInitalized() + { + // Arrange + Segment segment = new Segment(isInitalized: false); + + // Act + bool hasNext = segment.HasNext(); + + // Assert + Assert.IsTrue(hasNext); + } + + [Test] + public void HasNext_False() + { + // Arrange + List shards = new List(); + Segment segment = new Segment( + isInitalized: true, + shards: shards); + + // Act + bool hasNext = segment.HasNext(); + + // Assert + Assert.IsFalse(hasNext); + } + + [Test] + public void GetCursor() + { + // Arrange + DateTimeOffset dateTime = DateTimeOffset.UtcNow; + int shardIndex = 4; + Mock shard = new Mock(MockBehavior.Strict); + Mock shardCursor = new Mock(MockBehavior.Strict); + shard.Setup(r => r.GetCursor()).Returns(shardCursor.Object); + List shards = new List + { + shard.Object + }; + Segment segment = new Segment( + isInitalized: true, + shards: shards, + shardIndex: shardIndex, + dateTime: dateTime); + + // Act + SegmentCursor cursor = segment.GetCursor(); + + // Assert + Assert.AreEqual(dateTime, cursor.SegmentTime); + Assert.AreEqual(1, cursor.ShardCursors.Count); + Assert.AreEqual(shardCursor.Object, cursor.ShardCursors[0]); + Assert.AreEqual(shardIndex, cursor.ShardIndex); + } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs index ead10c4cb651e..cc38c9e361435 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs @@ -36,7 +36,7 @@ public void GetCursor() Shard shard = new Shard(chunk.Object, chunkIndex); // Act - BlobChangeFeedShardCursor cursor = shard.GetCursor(); + ShardCursor cursor = shard.GetCursor(); // Assert 
Assert.AreEqual(chunkIndex, cursor.ChunkIndex); @@ -149,7 +149,7 @@ public async Task Next() // Act BlobChangeFeedEvent changeFeedEvent = await shard.Next(IsAsync); - BlobChangeFeedShardCursor cursor = shard.GetCursor(); + ShardCursor cursor = shard.GetCursor(); // Assert Assert.AreEqual(eventId, changeFeedEvent.Id); From 2e0c2c2aeaba0c3468031abd9d573240d27a9b4a Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Thu, 30 Apr 2020 17:35:32 -0700 Subject: [PATCH 05/30] changed service version back to 12-12 --- .../tests/BlobChangeFeedAsyncPagableTests.cs | 4 ++-- .../src/Shared/StorageVersionExtensions.cs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs index ba1f3801fbec0..66aeb01d3afa2 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs @@ -15,7 +15,7 @@ namespace Azure.Storage.Blobs.ChangeFeed.Tests public class BlobChangeFeedAsyncPagableTests : ChangeFeedTestBase { public BlobChangeFeedAsyncPagableTests(bool async) - : base(async, RecordedTestMode.Live /* RecordedTestMode.Record /* to re-record */) + : base(async, null /* RecordedTestMode.Record /* to re-record */) { } @@ -24,7 +24,7 @@ public BlobChangeFeedAsyncPagableTests(bool async) //TODO page size tests [Test] - //[Ignore("")] + [Ignore("")] public async Task Test() { BlobServiceClient service = GetServiceClient_SharedKey(); diff --git a/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs b/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs index 9de33dc548600..ff480ae550a65 100644 --- a/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs +++ b/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs @@ -51,7 
+51,7 @@ public static string ToVersionString(this ServiceVersion version) => ServiceVersion.V2019_02_02 => "2019-02-02", ServiceVersion.V2019_07_07 => "2019-07-07", // TODO this is temporary until 73 goes to stage. - ServiceVersion.V2019_12_12 => "2019-10-10", + ServiceVersion.V2019_12_12 => "2019-12-12", #elif QueueSDK // Queues just bumped the version number without changing the swagger ServiceVersion.V2019_02_02 => "2018-11-09", From 460887c8194b97b9d71dc2364844e6318932796c Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Fri, 1 May 2020 14:12:19 -0700 Subject: [PATCH 06/30] Added another Segment test --- .../src/ChangeFeed.cs | 11 +- .../src/Chunk.cs | 7 +- .../src/Segment.cs | 6 +- .../src/Shard.cs | 7 +- .../tests/ChangeFeedTests.cs | 4 +- .../tests/SegmentTests.cs | 105 +++++++++++++++++- .../tests/ShardTests.cs | 4 +- 7 files changed, 129 insertions(+), 15 deletions(-) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs index 850784488bb86..e15d8c54fe317 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs @@ -11,6 +11,7 @@ using Azure.Storage.Blobs; using Azure.Storage.Blobs.Models; using Azure.Storage.Blobs.ChangeFeed.Models; +using System.Threading; namespace Azure.Storage.Blobs.ChangeFeed { @@ -184,7 +185,8 @@ private async Task Initalize(bool async) // The last segment may still be adding chunks. public async Task> GetPage( bool async, - int pageSize = 512) + int pageSize = 512, + CancellationToken cancellationToken = default) { if (!_isInitalized) { @@ -216,7 +218,10 @@ public async Task> GetPage( && HasNext()) { //TODO what if segment doesn't have a page size worth of data? 
- List newEvents = await _currentSegment.GetPage(async, remainingEvents).ConfigureAwait(false); + List newEvents = await _currentSegment.GetPage( + async, + remainingEvents, + cancellationToken).ConfigureAwait(false); blobChangeFeedEvents.AddRange(newEvents); remainingEvents -= newEvents.Count; await AdvanceSegmentIfNecessary(async).ConfigureAwait(false); @@ -382,4 +387,4 @@ private static void ValidateCursor( } } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs index 1024446122383..bcaa87e474fc1 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs @@ -4,6 +4,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Threading; using System.Threading.Tasks; using Azure.Storage.Blobs.ChangeFeed.Models; using Azure.Storage.Internal.Avro; @@ -85,7 +86,9 @@ public Chunk( public virtual bool HasNext() => _avroReader.HasNext(); - public virtual async Task Next(bool async) + public virtual async Task Next( + bool async, + CancellationToken cancellationToken = default) { Dictionary result; @@ -94,7 +97,7 @@ public virtual async Task Next(bool async) return null; } - result = (Dictionary)await _avroReader.Next(async).ConfigureAwait(false); + result = (Dictionary)await _avroReader.Next(async, cancellationToken).ConfigureAwait(false); BlockOffset = _avroReader.BlockOffset; EventIndex = _avroReader.ObjectIndex; return new BlobChangeFeedEvent(result); diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs index d391605a6b709..9226e4523ad3d 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs @@ -5,6 +5,7 @@ using System.Collections.Generic; using System.Text; using System.Text.Json; +using System.Threading; 
using System.Threading.Tasks; using Azure.Core.Pipeline; using Azure.Storage.Blobs; @@ -123,7 +124,8 @@ public SegmentCursor GetCursor() public async Task> GetPage( bool async, - int? pageSize) + int? pageSize, + CancellationToken cancellationToken = default) { List changeFeedEventList = new List(); @@ -142,7 +144,7 @@ public async Task> GetPage( { Shard currentShard = _shards[_shardIndex]; - BlobChangeFeedEvent changeFeedEvent = await currentShard.Next(async).ConfigureAwait(false); + BlobChangeFeedEvent changeFeedEvent = await currentShard.Next(async, cancellationToken).ConfigureAwait(false); changeFeedEventList.Add(changeFeedEvent); diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs index 9d78cfa317e26..1125b35b62b9c 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs @@ -6,6 +6,7 @@ using System.Threading.Tasks; using Azure.Storage.Blobs.Models; using Azure.Storage.Blobs.ChangeFeed.Models; +using System.Threading; namespace Azure.Storage.Blobs.ChangeFeed { @@ -131,7 +132,9 @@ public virtual bool HasNext() return _chunks.Count > 0 || _currentChunk.HasNext(); } - public virtual async Task Next(bool async) + public virtual async Task Next( + bool async, + CancellationToken cancellationToken = default) { if (!_isInitialized) { @@ -145,7 +148,7 @@ public virtual async Task Next(bool async) BlobChangeFeedEvent changeFeedEvent; - changeFeedEvent = await _currentChunk.Next(async).ConfigureAwait(false); + changeFeedEvent = await _currentChunk.Next(async, cancellationToken).ConfigureAwait(false); // Remove currentChunk if it doesn't have another event. 
if (!_currentChunk.HasNext() && _chunks.Count > 0) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs index 8494d5ed8bcf3..2ffe1b67aaedf 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs @@ -29,7 +29,7 @@ public async Task GetYearPathsTest() if (IsAsync) { - AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathFuncAsync); + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathFuncAsync); containerClient.Setup(r => r.GetBlobsByHierarchyAsync( default, @@ -41,7 +41,7 @@ public async Task GetYearPathsTest() else { Pageable pageable = - PageResponseEnumerator.CreateEnumerable(GetYearPathFunc); + PageResponseEnumerator.CreateEnumerable(GetYearPathFunc); containerClient.Setup(r => r.GetBlobsByHierarchy( default, diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs index 6f298267e8494..85a978ed27ab8 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs @@ -3,15 +3,20 @@ using System; using System.Collections.Generic; -using System.Text; +using System.Threading.Tasks; using Azure.Storage.Blobs.ChangeFeed.Models; using Moq; using NUnit.Framework; namespace Azure.Storage.Blobs.ChangeFeed.Tests { - public class SegmentTests + public class SegmentTests : ChangeFeedTestBase { + public SegmentTests(bool async) + : base(async, null /* RecordedTestMode.Record /* to re-record */) + { + } + [Test] public void HasNext_NotInitalized() { @@ -69,5 +74,101 @@ public void GetCursor() Assert.AreEqual(shardCursor.Object, cursor.ShardCursors[0]); Assert.AreEqual(shardIndex, cursor.ShardIndex); } + + /// + /// In this test, the Segment has 3 
Shards and 5 total Events. + /// Shard index 0 and 1 have 2 Events, and Shard index 2 has 1 Event. + /// We are round-robining the Shards, so we will return the events for + /// the shards indexes: 0 1 2 0 1. + /// + [Test] + public async Task GetPage() + { + // Arrange + int eventCount = 5; + int shardCount = 3; + + List eventIds = new List(); + for (int i = 0; i < eventCount; i++) + { + eventIds.Add(Guid.NewGuid()); + } + + List> mockShards = new List>(); + + for (int i = 0; i (MockBehavior.Strict)); + } + + // Set up Shards + mockShards[0].SetupSequence(r => r.Next(It.IsAny(), default)) + .Returns(Task.FromResult(new BlobChangeFeedEvent + { + Id = eventIds[0] + })) + .Returns(Task.FromResult(new BlobChangeFeedEvent + { + Id = eventIds[3] + })); + + mockShards[0].SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(false); + + mockShards[1].SetupSequence(r => r.Next(It.IsAny(), default)) + .Returns(Task.FromResult(new BlobChangeFeedEvent + { + Id = eventIds[1] + })) + .Returns(Task.FromResult(new BlobChangeFeedEvent + { + Id = eventIds[4] + })); + + mockShards[1].SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(false); + + mockShards[2].Setup(r => r.Next(It.IsAny(), default)) + .Returns(Task.FromResult(new BlobChangeFeedEvent + { + Id = eventIds[2] + })); + + mockShards[2].Setup(r => r.HasNext()) + .Returns(false); + + List shards = new List(); + for (int i = 0; i < shardCount; i++) + { + shards.Add(mockShards[i].Object); + } + + Segment segment = new Segment( + isInitalized: true, + shards: shards); + + // Act + List events = await segment.GetPage(IsAsync, 25); + + // Assert + Assert.AreEqual(eventCount, events.Count); + for (int i = 0; i < eventCount; i++) + { + Assert.AreEqual(eventIds[i], events[i].Id); + } + + mockShards[0].Verify(r => r.Next(IsAsync, default)); + mockShards[0].Verify(r => r.HasNext()); + mockShards[1].Verify(r => r.Next(IsAsync, default)); + mockShards[1].Verify(r => r.HasNext()); + mockShards[2].Verify(r => 
r.Next(IsAsync, default)); + mockShards[2].Verify(r => r.HasNext()); + mockShards[0].Verify(r => r.Next(IsAsync, default)); + mockShards[0].Verify(r => r.HasNext()); + mockShards[1].Verify(r => r.Next(IsAsync, default)); + mockShards[1].Verify(r => r.HasNext()); + } } } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs index cc38c9e361435..83b6d1f0d209d 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs @@ -133,7 +133,7 @@ public async Task Next() Mock chunk = new Mock(MockBehavior.Strict); chunk.Setup(r => r.HasNext()).Returns(true); - chunk.Setup(r => r.Next(It.IsAny())).Returns(Task.FromResult(expectedChangeFeedEvent)); + chunk.Setup(r => r.Next(It.IsAny(), default)).Returns(Task.FromResult(expectedChangeFeedEvent)); chunk.Setup(r => r.HasNext()).Returns(false); Mock containerClient = new Mock(MockBehavior.Strict); @@ -156,7 +156,7 @@ public async Task Next() Assert.AreEqual(1, cursor.ChunkIndex); chunk.Verify(r => r.HasNext()); - chunk.Verify(r => r.Next(IsAsync)); + chunk.Verify(r => r.Next(IsAsync, default)); chunk.Verify(r => r.HasNext()); containerClient.Verify(r => r.GetBlobClient(secondChunkName)); From fb2129573bc2022c7724af4f884d173bd8c6d129 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Fri, 1 May 2020 14:19:30 -0700 Subject: [PATCH 07/30] Recorded tests --- .../src/ChangeFeed.cs | 1 - .../tests/BlobChangeFeedPagableTests.cs | 5 +- .../tests/ChangeFeedTestBase.cs | 2 +- .../GetSegmentsInYearTest.json | 4 + .../GetSegmentsInYearTestAsync.json | 4 + .../ChangeFeedTests/GetYearPathsTest.json | 4 + .../GetYearPathsTestAsync.json | 4 + .../ChunkTests/HasNext_False.json | 4 + .../ChunkTests/HasNext_FalseAsync.json | 4 + .../ChunkTests/HasNext_True.json | 4 + .../ChunkTests/HasNext_TrueAsync.json | 4 + .../tests/SessionRecords/ChunkTests/Next.json | 4 + 
.../SessionRecords/ChunkTests/NextAsync.json | 4 + .../LazyLoadingBlobStreamTests/ReadAsync.json | 418 ++++++++++++++++++ .../ReadAsyncAsync.json | 418 ++++++++++++++++++ .../ReadAsync_InvalidParameterTests.json | 4 + .../ReadAsync_InvalidParameterTestsAsync.json | 4 + .../SegmentTests/GetCursor.json | 4 + .../SegmentTests/GetCursorAsync.json | 4 + .../SessionRecords/SegmentTests/GetPage.json | 4 + .../SegmentTests/GetPageAsync.json | 4 + .../SegmentTests/HasNext_False.json | 4 + .../SegmentTests/HasNext_FalseAsync.json | 4 + .../SegmentTests/HasNext_NotInitalized.json | 4 + .../HasNext_NotInitalizedAsync.json | 4 + .../SessionRecords/ShardTests/GetCursor.json | 4 + .../ShardTests/GetCursorAsync.json | 4 + .../ShardTests/HasNext_ChunksLeft.json | 4 + .../ShardTests/HasNext_ChunksLeftAsync.json | 4 + .../HasNext_CurrentChunkHasNext.json | 4 + .../HasNext_CurrentChunkHasNextAsync.json | 4 + .../ShardTests/HasNext_False.json | 4 + .../ShardTests/HasNext_FalseAsync.json | 4 + .../ShardTests/HasNext_NotInitalizes.json | 4 + .../HasNext_NotInitalizesAsync.json | 4 + .../tests/SessionRecords/ShardTests/Next.json | 4 + .../SessionRecords/ShardTests/NextAsync.json | 4 + 37 files changed, 968 insertions(+), 4 deletions(-) create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTest.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTestAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTest.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTestAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_False.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_FalseAsync.json create mode 100644 
sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_True.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_TrueAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/Next.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/NextAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsyncAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTests.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTestsAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursor.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursorAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPage.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPageAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_False.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_FalseAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalized.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalizedAsync.json create mode 100644 
sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursor.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursorAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeft.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeftAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNext.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNextAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_False.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_FalseAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizes.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizesAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/Next.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/NextAsync.json diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs index e15d8c54fe317..d5c351a55c64d 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs @@ -316,7 +316,6 @@ private async Task AdvanceSegmentIfNecessary(bool async) } // If _segments is empty, refill it - // TODO pull this out into private method else if (_segments.Count == 0 && _years.Count > 0) { string yearPath = _years.Dequeue(); diff --git 
a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs index af4ef366f228a..86e3ea02abb06 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs @@ -14,11 +14,12 @@ namespace Azure.Storage.Blobs.ChangeFeed.Tests public class BlobChangeFeedPagableTests : ChangeFeedTestBase { public BlobChangeFeedPagableTests(bool async) - : base(async, RecordedTestMode.Live /* RecordedTestMode.Record /* to re-record */) + : base(async, null /* RecordedTestMode.Record /* to re-record */) { } [Test] + [Ignore("")] public void Test() { BlobServiceClient service = GetServiceClient_SharedKey(); @@ -32,4 +33,4 @@ BlobChangeFeedPagable blobChangeFeedPagable } } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs index ebccf9de1b710..ca002b689a82f 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs @@ -18,7 +18,7 @@ public class ChangeFeedTestBase : StorageTestBase public ChangeFeedTestBase(bool async) : this(async, null) { } public ChangeFeedTestBase(bool async, RecordedTestMode? 
mode = null) - : base(async, RecordedTestMode.Live) + : base(async, mode) { } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTest.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTest.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTest.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTestAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTestAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetSegmentsInYearTestAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTest.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTest.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTest.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTestAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTestAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetYearPathsTestAsync.json @@ -0,0 +1,4 @@ +{ 
+ "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_False.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_False.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_False.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_FalseAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_FalseAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_FalseAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_True.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_True.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_True.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_TrueAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_TrueAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/HasNext_TrueAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git 
a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/Next.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/Next.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/Next.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/NextAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/NextAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChunkTests/NextAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync.json new file mode 100644 index 0000000000000..a04d62f548b2f --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync.json @@ -0,0 +1,418 @@ +{ + "Entries": [ + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90?restype=container", + "RequestMethod": "PUT", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-29b6b9e18beba24ea43664ce40b1b41c-7635994c96806c49-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-blob-public-access": "container", + "x-ms-client-request-id": "37d9033a-5b52-e093-6ae2-6fc8d4b9a256", + "x-ms-date": "Fri, 01 May 2020 21:18:02 GMT", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + 
"StatusCode": 201, + "ResponseHeaders": { + "Content-Length": "0", + "Date": "Fri, 01 May 2020 21:18:02 GMT", + "ETag": "\u00220x8D7EE152212CB76\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:02 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-client-request-id": "37d9033a-5b52-e093-6ae2-6fc8d4b9a256", + "x-ms-request-id": "b2b67fd9-b01e-0095-06fd-1fa515000000", + "x-ms-version": "2019-12-12" + }, + "ResponseBody": [] + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "PUT", + "RequestHeaders": { + "Authorization": "Sanitized", + "Content-Length": "1024", + "If-None-Match": "*", + "traceparent": "00-8056c19e6e2cbc4f90faa1c4a421cd3d-b1a6adc2fb891f4a-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "154dcba4-b4de-b434-753a-926f59fb8342", + "x-ms-date": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": 
"hunS36L2h2Xh0pWCMKtts3PsZ7irlz59mXeGfwCSvT5VTtkbKk3ZQ3x0VYDGEK\u002B\u002BdbxV6FLpqu6rVUYrcAdlTxRWLeHRUYglEyM8N0K\u002Bt4nHxwlEPdXra6PrcNUHrtN0DwZrb\u002BPN7SQ0L9QvrXOr7J29Y6n\u002BIBE\u002Ba8E95uCyHmVQxyc8Ukel\u002BtrlDw0AvL4wP4M3SUopdu70LQQijwZDgRFv3uZLb5pVF2nPGBADPq4CDadce9QaJ1A5SCVDg2rToo78kN7Y5M/p4TLgWV7d8O9WH4Fe5wU8VbeVEid1EpT/\u002B3aCB63QKinXc5miOc3yXFc27WPodLuIwDsmQtGoc8\u002Bd4V3nWuhBhfVYh3NsmRvqorXPPyQwF4e7\u002BUdxqedXL//KBFhvCwFu9GdukO1HHCYgPJ8c2DSqbcrWLGGh0NwNAFd0opXDepezYEu9woaZdNwhUG1HO1fKH5eNmd6gzSOLYI\u002BV0nqQXMEHaHfp7KpDPLeEgH9vxcUAqYL9NeG5E5MnMmH3BKFikPSrQNLf4TPsSlwmG0ykG5rKlSj3kyfgTlXjwrpJTDgWIu\u002BLxIXyIMhPEeuEkvhGgxbGLG54av3MyvF9\u002Bx54o/4kEPlcGFcV2HRgqIE8W7xdnD6jgh/JnbfyvvmUdSSp1m7tYIL/6Wq/sVT6GMJ7kG41IF/f1SDeWnJBalldgWJPttETTvo6XbPU49HLz4X0di6LXlXFyQFNDgYPvX8RYi/xqmLxaEFz\u002BsKsC/FOEortTfMr0iINiezQxUD0KqAzwoGh2SUjAw7sF2STGb0Q//t8InCL5pAN5CB1IaIBf9AieElwrDllyUnkPru\u002BeW8uwMZEeAgvp9zxlPQEreJs65eTcKF0n1LCFlgnpqjVT2qwRmHbvQfH8tBsfxhEYRxmeFIjKgjOuSB0gWGwCAE0G0WJU2aBkS6QzH7QCCA5frU8wC5VsSRi4oxAZjulF\u002BrPOGPTug215eYkk12K/bH/OgOrdEfS32ogJ3HaQCry6rWTAdC9ZsOohttHMB7G1\u002BTbQCw4\u002BaKi/Fj4spOc26S8Mqio9vzhQtRAA\u002B1siq5LWVCM/gD\u002BuvES8QR3xLMwC6s3iv52HuDo\u002Bf5X\u002BPW0NnKqrVlCD9ykvni3lQX3an4gEPhCNf\u002Bv1TpvowUBpsH7XbuSzBZa7fvPksyGvona8Y5/N\u002Bf9cgBy\u002Btce0mvWohi2vDdKZJ5nxiL8cqIzKRaxlbJ3gm/a/eu6INE/PBqP\u002BPC00AJSkIzwF3DFAf9ePtHBbT25Xx\u002Bo3L5MkETMKIoIbHqxflgTXwQmhmr\u002BFPzrXOH/fs/c1pnQC/REBQBHJO/mvZSFrMB5rQ04k9i1qsfBYTNjzKq7K5Xh4Q==", + "StatusCode": 201, + "ResponseHeaders": { + "Content-Length": "0", + "Content-MD5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "Date": "Fri, 01 May 2020 21:18:02 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-client-request-id": "154dcba4-b4de-b434-753a-926f59fb8342", + "x-ms-content-crc64": "sFHvOXWQOCg=", + "x-ms-request-id": 
"b2b67fff-b01e-0095-28fd-1fa515000000", + "x-ms-request-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": [] + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-07535d5ba97159438e3e5004dd10816d-f099c446f5756e45-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "e5c169c5-71d4-0df2-6f8f-4ae1cebd2aa9", + "x-ms-date": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-range": "bytes=0-156", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 0-156/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "e5c169c5-71d4-0df2-6f8f-4ae1cebd2aa9", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "b2b68008-b01e-0095-30fd-1fa515000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": 
"hunS36L2h2Xh0pWCMKtts3PsZ7irlz59mXeGfwCSvT5VTtkbKk3ZQ3x0VYDGEK\u002B\u002BdbxV6FLpqu6rVUYrcAdlTxRWLeHRUYglEyM8N0K\u002Bt4nHxwlEPdXra6PrcNUHrtN0DwZrb\u002BPN7SQ0L9QvrXOr7J29Y6n\u002BIBE\u002Ba8E95uCyHmVQxyc8Ukel\u002BtrlDw0AvL4wP4M3SUopdu70LQQijw==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-3016bd7d6293504b956a0541cb89c807-3d54089df7699c4c-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "f0a9a604-9d39-d46e-e6ec-17c2d5e44278", + "x-ms-date": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-range": "bytes=157-313", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 157-313/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "f0a9a604-9d39-d46e-e6ec-17c2d5e44278", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "b2b6802f-b01e-0095-55fd-1fa515000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": 
"BkOBEW/e5ktvmlUXac8YEAM\u002BrgINp1x71BonUDlIJUODatOijvyQ3tjkz\u002BnhMuBZXt3w71YfgV7nBTxVt5USJ3USlP/7doIHrdAqKddzmaI5zfJcVzbtY\u002Bh0u4jAOyZC0ahzz53hXeda6EGF9ViHc2yZG\u002Bqitc8/JDAXh7v5R3Gp51cv/8oEWG8LAW70Z26Q7UccJiA8nxzYNKptyg==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-0fe3d3744d71064ea107d1648e7348ac-b7947c7b3fb7d948-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "27dcdfd5-3a14-ab6f-333d-8545f3eaad21", + "x-ms-date": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-range": "bytes=314-470", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 314-470/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "27dcdfd5-3a14-ab6f-333d-8545f3eaad21", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "b2b68038-b01e-0095-5dfd-1fa515000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": 
"1ixhodDcDQBXdKKVw3qXs2BLvcKGmXTcIVBtRztXyh\u002BXjZneoM0ji2CPldJ6kFzBB2h36eyqQzy3hIB/b8XFAKmC/TXhuROTJzJh9wShYpD0q0DS3\u002BEz7EpcJhtMpBuaypUo95Mn4E5V48K6SUw4FiLvi8SF8iDITxHrhJL4RoMWxixueGr9zMrxffseeKP\u002BJBD5XBhXFdh0YKiBPA==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-c252f3489f4e9844a13ff531b9ec3daa-25b062df34267d4a-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "f6eda3df-1b07-e764-9b42-25bc4375c7d2", + "x-ms-date": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-range": "bytes=471-627", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 471-627/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "f6eda3df-1b07-e764-9b42-25bc4375c7d2", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "b2b68049-b01e-0095-6bfe-1fa515000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": 
"W7xdnD6jgh/JnbfyvvmUdSSp1m7tYIL/6Wq/sVT6GMJ7kG41IF/f1SDeWnJBalldgWJPttETTvo6XbPU49HLz4X0di6LXlXFyQFNDgYPvX8RYi/xqmLxaEFz\u002BsKsC/FOEortTfMr0iINiezQxUD0KqAzwoGh2SUjAw7sF2STGb0Q//t8InCL5pAN5CB1IaIBf9AieElwrDllyUnkPg==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-fcc9ad0ca7995f4da7568fee4bbcdead-15d355a94228a74e-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "5066fefa-b7fc-dd19-0189-58ca3bb5f877", + "x-ms-date": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-range": "bytes=628-784", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 628-784/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "5066fefa-b7fc-dd19-0189-58ca3bb5f877", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "b2b6804c-b01e-0095-6efe-1fa515000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": 
"u755by7AxkR4CC\u002Bn3PGU9ASt4mzrl5NwoXSfUsIWWCemqNVParBGYdu9B8fy0Gx/GERhHGZ4UiMqCM65IHSBYbAIATQbRYlTZoGRLpDMftAIIDl\u002BtTzALlWxJGLijEBmO6UX6s84Y9O6DbXl5iSTXYr9sf86A6t0R9LfaiAncdpAKvLqtZMB0L1mw6iG20cwHsbX5NtALDj5oqL8WA==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-6a7127f21ef82743991e88106d432734-2a061e027738f345-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "45c1c755-770e-d7d5-a4c3-2b64c09c39b8", + "x-ms-date": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-range": "bytes=785-941", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 785-941/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "45c1c755-770e-d7d5-a4c3-2b64c09c39b8", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "b2b68057-b01e-0095-79fe-1fa515000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": 
"\u002BLKTnNukvDKoqPb84ULUQAPtbIquS1lQjP4A/rrxEvEEd8SzMAurN4r\u002Bdh7g6Pn\u002BV/j1tDZyqq1ZQg/cpL54t5UF92p\u002BIBD4QjX/r9U6b6MFAabB\u002B127kswWWu37z5LMhr6J2vGOfzfn/XIAcvrXHtJr1qIYtrw3SmSeZ8Yi/HKiMykWsZWyd4Jv2v3ruiDRPzwaj/jwtNACUpCM8A==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90/test-blob-59cf6373-de04-7a62-1757-6865b02186d0", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-4d8d0c28cb9f104e8dd0061cf69c7b38-32a27dc310b00c4f-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "747431a8-b2f3-af68-8c94-5b71eb3305a0", + "x-ms-date": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-range": "bytes=942-1098", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "82", + "Content-Range": "bytes 942-1023/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE15222A21EC\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "J/NgEeGJUkEwKuO4lOzaQQ==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "747431a8-b2f3-af68-8c94-5b71eb3305a0", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:03 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "b2b6807b-b01e-0095-1cfe-1fa515000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:03.1134979Z" + }, + "ResponseBody": "F3DFAf9ePtHBbT25Xx\u002Bo3L5MkETMKIoIbHqxflgTXwQmhmr\u002BFPzrXOH/fs/c1pnQC/REBQBHJO/mvZSFrMB5rQ04k9i1qsfBYTNjzKq7K5Xh4Q==" + 
}, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-8d643cd6-d57a-720b-b0a0-68a2bc84ce90?restype=container", + "RequestMethod": "DELETE", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-f65c24cb0ab5a644b1cdb308cb000d29-eb5a238f2825e048-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "8fbc69c5-c427-f5c5-5a4e-187291b7f932", + "x-ms-date": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 202, + "ResponseHeaders": { + "Content-Length": "0", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-client-request-id": "8fbc69c5-c427-f5c5-5a4e-187291b7f932", + "x-ms-request-id": "b2b68096-b01e-0095-37fe-1fa515000000", + "x-ms-version": "2019-12-12" + }, + "ResponseBody": [] + } + ], + "Variables": { + "RandomSeed": "792381840", + "Storage_TestConfigDefault": 
"ProductionTenant\nseanmcccanary\nU2FuaXRpemVk\nhttps://seanmcccanary.blob.core.windows.net\nhttps://seanmcccanary.file.core.windows.net\nhttps://seanmcccanary.queue.core.windows.net\nhttps://seanmcccanary.table.core.windows.net\n\n\n\n\nhttps://seanmcccanary-secondary.blob.core.windows.net\nhttps://seanmcccanary-secondary.file.core.windows.net\nhttps://seanmcccanary-secondary.queue.core.windows.net\nhttps://seanmcccanary-secondary.table.core.windows.net\n\nSanitized\n\n\nCloud\nBlobEndpoint=https://seanmcccanary.blob.core.windows.net/;QueueEndpoint=https://seanmcccanary.queue.core.windows.net/;FileEndpoint=https://seanmcccanary.file.core.windows.net/;BlobSecondaryEndpoint=https://seanmcccanary-secondary.blob.core.windows.net/;QueueSecondaryEndpoint=https://seanmcccanary-secondary.queue.core.windows.net/;FileSecondaryEndpoint=https://seanmcccanary-secondary.file.core.windows.net/;AccountName=seanmcccanary;AccountKey=Sanitized\nseanscope1" + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsyncAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsyncAsync.json new file mode 100644 index 0000000000000..3f44471337f09 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsyncAsync.json @@ -0,0 +1,418 @@ +{ + "Entries": [ + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9?restype=container", + "RequestMethod": "PUT", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-ab3ce3ce807b344bb4f46c41c58e4a63-b15c2830fb726b4a-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-blob-public-access": "container", + "x-ms-client-request-id": "3e9eb20a-7b8e-30d1-ba02-498361189a39", + "x-ms-date": 
"Fri, 01 May 2020 21:18:04 GMT", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 201, + "ResponseHeaders": { + "Content-Length": "0", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE1522F2D80E\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-client-request-id": "3e9eb20a-7b8e-30d1-ba02-498361189a39", + "x-ms-request-id": "099de8d5-e01e-0043-14fe-1fabfc000000", + "x-ms-version": "2019-12-12" + }, + "ResponseBody": [] + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "PUT", + "RequestHeaders": { + "Authorization": "Sanitized", + "Content-Length": "1024", + "If-None-Match": "*", + "traceparent": "00-d8a03fb949beb4419d83cde01f21fe05-4c41fdfcc7972a49-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "4d391c40-2c2a-907d-3446-b186ad21ec62", + "x-ms-date": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": 
"Xvph0NiOB\u002BUXICslttuQJWXp6tVol1sN9VHlbLkDFfOR5l7yjlrafKu4J25Qq3nfciQdeC7F0MeeVNuHqZssEwo4lNB9nZcLDm4pr3DtvUnXBvqV5p3FyxWXVDl\u002BU9MQjBNjGBepYdSMEWq/nCFXsZeSr6Khli1tcsm75kldjbqaGSJciaUxLqQlCdQzBS/0P6Ki7KMwb/bm9qVO7nuLLoSvJYL\u002B\u002BifyYIH\u002Be4xZOW2Zx6amuqAXaqosdpP/40AomtpPg\u002BCs9UR7sIwhMvNpjbw4tCbV5HTkl7rY0YSU8n1B1eGyCbaM7NFhKgoU9Ti2wunWxy4ZU7Vma\u002Bsl1CPA5r4O4FwStoMGq8dVc7lpz6KCIQ6bvStOKIIrTJ3KZhitoUNMBRXbpAtl0Btmw9/7/JxJo7IVJSmSwLfa5mrmPEWvqNJ1HcmMpn0CNe0Q52SjME8AgMNlmHwauUVdAb2hfpPWtxsNhd7Svfw1Eu87gthTN7Ya5ryBSQ9VW2SELqXAYbdZOi9JlapxLsPHMIaN3jdsCsoGhpjXVG4Ltyp6TY5WdLA44keoCaEMqbj5uhmJyHwsGCTaudoegeXBPd5/9\u002BA0cLvVVfzvwwAp\u002Bt3g3ppVge6uTTVpe7rVGWBh58vCjLHXxElds3N/PFQjsb/9PqJLbLvw0amqyvQA/zubjtw1Z20rCBpQ9816LhHhRsr62Nq\u002BFnoP0boILflVXLQ45K537kpS0TWI\u002B3TtdqOYyYQqBidh3XhhJZWyI9sg\u002BpsZQkliaVh/qAekNRoGIlsXcGR1K1aN1gY2Fsm0n65aZwDZ1DRZNV8sql0fvKddWwI6m7J3q89FjGtDLFOOyghYSs4aLE0nvTsESYD4bRpe2LrGegT747swk0fIFggMnd9UxnLZQy60Et13cmJOTB9R5FOZyFAEXWYS6ZgGXqcy\u002BKuho6anhZWLEWgmhHBZ4kcndzK1ctWPUGkLxlqVrrQ2RXq2rKh4qV6\u002BDrbo3tj1uAdR3ud1fIXWElM8m7469WO07z72ozWer83T9mQep3GQFg1TRM/Mwcj035pVPA2ggxhTM\u002B6a/xXRsy8ZoYYhuGvhrIWw57GT5y52AsObSXPAM3NLynOD9hb4gZCgsx9C0ycuc4\u002BH75tnwEzAI/xSNmq8WW4S1e34XrVvuGYg8zQOMy\u002BbbNXjOETX5AomwtIZpUom41mwL/06NhXZGDY1R80Yse/Nt8qV2SEqO/Hj0ZvjVRIb\u002BQ/aXQQxYTa/CR8NoZw8DhDiTO6AkQo4hvMMVmR69sMjGk6\u002BPVPrnFWT1c8BHZlUp8OKAAgkmmcEodcQJArSRRJb6YZOftC997CY7Lw6NmhZt6\u002Bg7/W7/3/muZx82g==", + "StatusCode": 201, + "ResponseHeaders": { + "Content-Length": "0", + "Content-MD5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "Date": "Fri, 01 May 2020 21:18:03 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-client-request-id": "4d391c40-2c2a-907d-3446-b186ad21ec62", + "x-ms-content-crc64": "Bps5Y\u002BRozCA=", + "x-ms-request-id": "099de8e2-e01e-0043-1efe-1fabfc000000", + 
"x-ms-request-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": [] + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-ae29abac71f6c14187f12b46be8059b9-43868325c9b26b4c-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "ed8cbe63-3d1f-7f83-4bda-3a6fbbd48434", + "x-ms-date": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-range": "bytes=0-156", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 0-156/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "ed8cbe63-3d1f-7f83-4bda-3a6fbbd48434", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "099de8f4-e01e-0043-30fe-1fabfc000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": 
"Xvph0NiOB\u002BUXICslttuQJWXp6tVol1sN9VHlbLkDFfOR5l7yjlrafKu4J25Qq3nfciQdeC7F0MeeVNuHqZssEwo4lNB9nZcLDm4pr3DtvUnXBvqV5p3FyxWXVDl\u002BU9MQjBNjGBepYdSMEWq/nCFXsZeSr6Khli1tcsm75kldjbqaGSJciaUxLqQlCdQzBS/0P6Ki7KMwb/bm9qVO7g==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-ac9d2951125b6e449407e72923c2d6fb-0507008691aaab48-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "ee23123a-ff95-3743-4dba-a356d3b44e1b", + "x-ms-date": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-range": "bytes=157-313", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 157-313/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "ee23123a-ff95-3743-4dba-a356d3b44e1b", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "099de924-e01e-0043-5efe-1fabfc000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": 
"e4suhK8lgv76J/Jggf57jFk5bZnHpqa6oBdqqix2k//jQCia2k\u002BD4Kz1RHuwjCEy82mNvDi0JtXkdOSXutjRhJTyfUHV4bIJtozs0WEqChT1OLbC6dbHLhlTtWZr6yXUI8Dmvg7gXBK2gwarx1VzuWnPooIhDpu9K04ogitMncpmGK2hQ0wFFdukC2XQG2bD3/v8nEmjshUlKZLAtw==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-773ae73907ab234a8b10fa5dfff6d943-da45c684dc228442-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "a9344e50-95b0-309e-fa3e-e77131ac6043", + "x-ms-date": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-range": "bytes=314-470", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 314-470/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "a9344e50-95b0-309e-fa3e-e77131ac6043", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "099de938-e01e-0043-6ffe-1fabfc000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": 
"2uZq5jxFr6jSdR3JjKZ9AjXtEOdkozBPAIDDZZh8GrlFXQG9oX6T1rcbDYXe0r38NRLvO4LYUze2Gua8gUkPVVtkhC6lwGG3WTovSZWqcS7DxzCGjd43bArKBoaY11RuC7cqek2OVnSwOOJHqAmhDKm4\u002BboZich8LBgk2rnaHoHlwT3ef/fgNHC71VX878MAKfrd4N6aVYHurk01aQ==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-a1d1cd6f819eda48bf0a01984df68d90-8e297298295d814e-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "8406352d-a47f-766a-673e-ce96ee005c0f", + "x-ms-date": "Fri, 01 May 2020 21:18:05 GMT", + "x-ms-range": "bytes=471-627", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 471-627/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "8406352d-a47f-766a-673e-ce96ee005c0f", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "099de946-e01e-0043-7dfe-1fabfc000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": 
"e7rVGWBh58vCjLHXxElds3N/PFQjsb/9PqJLbLvw0amqyvQA/zubjtw1Z20rCBpQ9816LhHhRsr62Nq\u002BFnoP0boILflVXLQ45K537kpS0TWI\u002B3TtdqOYyYQqBidh3XhhJZWyI9sg\u002BpsZQkliaVh/qAekNRoGIlsXcGR1K1aN1gY2Fsm0n65aZwDZ1DRZNV8sql0fvKddWwI6m7J3qw==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-6a755e782694a04d8a1ec8449e3118bb-9a085cd6c135f543-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "fc4482e6-dca6-3bc3-d817-842ff380759f", + "x-ms-date": "Fri, 01 May 2020 21:18:05 GMT", + "x-ms-range": "bytes=628-784", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 628-784/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "fc4482e6-dca6-3bc3-d817-842ff380759f", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "099de94f-e01e-0043-04fe-1fabfc000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": 
"z0WMa0MsU47KCFhKzhosTSe9OwRJgPhtGl7YusZ6BPvjuzCTR8gWCAyd31TGctlDLrQS3XdyYk5MH1HkU5nIUARdZhLpmAZepzL4q6GjpqeFlYsRaCaEcFniRyd3MrVy1Y9QaQvGWpWutDZFerasqHipXr4Otuje2PW4B1He53V8hdYSUzybvjr1Y7TvPvajNZ6vzdP2ZB6ncZAWDQ==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-34b2384957375641ba7af5a5ff753d0a-7ecdbd2fd7028041-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "6baf2718-5fbb-1b69-eed3-c3431cd19bb7", + "x-ms-date": "Fri, 01 May 2020 21:18:05 GMT", + "x-ms-range": "bytes=785-941", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "157", + "Content-Range": "bytes 785-941/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "6baf2718-5fbb-1b69-eed3-c3431cd19bb7", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "099de96c-e01e-0043-20fe-1fabfc000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": 
"U0TPzMHI9N\u002BaVTwNoIMYUzPumv8V0bMvGaGGIbhr4ayFsOexk\u002BcudgLDm0lzwDNzS8pzg/YW\u002BIGQoLMfQtMnLnOPh\u002B\u002BbZ8BMwCP8UjZqvFluEtXt\u002BF61b7hmIPM0DjMvm2zV4zhE1\u002BQKJsLSGaVKJuNZsC/9OjYV2Rg2NUfNGLHvzbfKldkhKjvx49Gb41USG/kP2l0EMWE2vwkfDQ==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9/test-blob-90c14a77-a6bc-1333-0628-f3cd09588bae", + "RequestMethod": "GET", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-75be2878e805fc47b8f43ddbed4243c4-d1f6d6a46b935b4a-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "cb178003-09d0-d9e4-b928-6a70f8c76e64", + "x-ms-date": "Fri, 01 May 2020 21:18:05 GMT", + "x-ms-range": "bytes=942-1098", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 206, + "ResponseHeaders": { + "Accept-Ranges": "bytes", + "Content-Length": "82", + "Content-Range": "bytes 942-1023/1024", + "Content-Type": "application/octet-stream", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "ETag": "\u00220x8D7EE152301BD56\u0022", + "Last-Modified": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-blob-content-md5": "4\u002B8KTsb7wFod2OLq\u002BfXIoA==", + "x-ms-blob-type": "BlockBlob", + "x-ms-client-request-id": "cb178003-09d0-d9e4-b928-6a70f8c76e64", + "x-ms-creation-time": "Fri, 01 May 2020 21:18:04 GMT", + "x-ms-is-current-version": "true", + "x-ms-lease-state": "available", + "x-ms-lease-status": "unlocked", + "x-ms-request-id": "099de979-e01e-0043-2bfe-1fabfc000000", + "x-ms-server-encrypted": "true", + "x-ms-version": "2019-12-12", + "x-ms-version-id": "2020-05-01T21:18:04.5265000Z" + }, + "ResponseBody": 
"oZw8DhDiTO6AkQo4hvMMVmR69sMjGk6\u002BPVPrnFWT1c8BHZlUp8OKAAgkmmcEodcQJArSRRJb6YZOftC997CY7Lw6NmhZt6\u002Bg7/W7/3/muZx82g==" + }, + { + "RequestUri": "https://seanmcccanary.blob.core.windows.net/test-container-37979ee9-d552-7d98-1590-9cf48707d6e9?restype=container", + "RequestMethod": "DELETE", + "RequestHeaders": { + "Authorization": "Sanitized", + "traceparent": "00-c58a271797ebe24ab7e134cf5772365d-d804b1efc6df294e-00", + "User-Agent": [ + "azsdk-net-Storage.Blobs/12.5.0-dev.20200501.1", + "(.NET Core 4.6.28325.01; Microsoft Windows 10.0.18362 )" + ], + "x-ms-client-request-id": "9c3f3a33-cfb9-50e6-8508-86f663f231b1", + "x-ms-date": "Fri, 01 May 2020 21:18:05 GMT", + "x-ms-return-client-request-id": "true", + "x-ms-version": "2019-12-12" + }, + "RequestBody": null, + "StatusCode": 202, + "ResponseHeaders": { + "Content-Length": "0", + "Date": "Fri, 01 May 2020 21:18:04 GMT", + "Server": [ + "Windows-Azure-Blob/1.0", + "Microsoft-HTTPAPI/2.0" + ], + "x-ms-client-request-id": "9c3f3a33-cfb9-50e6-8508-86f663f231b1", + "x-ms-request-id": "099de983-e01e-0043-34fe-1fabfc000000", + "x-ms-version": "2019-12-12" + }, + "ResponseBody": [] + } + ], + "Variables": { + "RandomSeed": "1584867388", + "Storage_TestConfigDefault": 
"ProductionTenant\nseanmcccanary\nU2FuaXRpemVk\nhttps://seanmcccanary.blob.core.windows.net\nhttps://seanmcccanary.file.core.windows.net\nhttps://seanmcccanary.queue.core.windows.net\nhttps://seanmcccanary.table.core.windows.net\n\n\n\n\nhttps://seanmcccanary-secondary.blob.core.windows.net\nhttps://seanmcccanary-secondary.file.core.windows.net\nhttps://seanmcccanary-secondary.queue.core.windows.net\nhttps://seanmcccanary-secondary.table.core.windows.net\n\nSanitized\n\n\nCloud\nBlobEndpoint=https://seanmcccanary.blob.core.windows.net/;QueueEndpoint=https://seanmcccanary.queue.core.windows.net/;FileEndpoint=https://seanmcccanary.file.core.windows.net/;BlobSecondaryEndpoint=https://seanmcccanary-secondary.blob.core.windows.net/;QueueSecondaryEndpoint=https://seanmcccanary-secondary.queue.core.windows.net/;FileSecondaryEndpoint=https://seanmcccanary-secondary.file.core.windows.net/;AccountName=seanmcccanary;AccountKey=Sanitized\nseanscope1" + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTests.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTests.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTests.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTestsAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTestsAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/LazyLoadingBlobStreamTests/ReadAsync_InvalidParameterTestsAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursor.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursor.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursor.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursorAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursorAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetCursorAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPage.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPage.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPage.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPageAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPageAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/GetPageAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + 
"Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_False.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_False.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_False.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_FalseAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_FalseAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_FalseAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalized.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalized.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalized.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalizedAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalizedAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/SegmentTests/HasNext_NotInitalizedAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git 
a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursor.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursor.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursor.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursorAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursorAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/GetCursorAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeft.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeft.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeft.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeftAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeftAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_ChunksLeftAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNext.json 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNext.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNext.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNextAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNextAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_CurrentChunkHasNextAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_False.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_False.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_False.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_FalseAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_FalseAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_FalseAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizes.json 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizes.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizes.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizesAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizesAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/HasNext_NotInitalizesAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/Next.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/Next.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/Next.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/NextAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/NextAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ShardTests/NextAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file From f60dc32c0af44d0baa64213ac8eb9de1424ab580 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Fri, 1 May 2020 15:40:12 -0700 Subject: [PATCH 08/30] Fixed build --- .../tests/BlobChangeFeedAsyncPagableTests.cs | 1 - 
.../tests/BlobChangeFeedPagableTests.cs | 1 - .../Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs | 2 +- 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs index 66aeb01d3afa2..9955c4fcf772c 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs @@ -5,7 +5,6 @@ using System.Collections.Generic; using System.Text; using System.Threading.Tasks; -using Azure.Core.Testing; using Azure.Storage.Blobs; using Azure.Storage.Blobs.ChangeFeed.Models; using NUnit.Framework; diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs index 86e3ea02abb06..76a7eae639687 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedPagableTests.cs @@ -5,7 +5,6 @@ using System.Collections.Generic; using System.Linq; using System.Text; -using Azure.Core.Testing; using Azure.Storage.Blobs.ChangeFeed.Models; using NUnit.Framework; diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs index ca002b689a82f..115ac85c5f2a9 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs @@ -5,7 +5,7 @@ using System.Collections.Generic; using System.Threading.Tasks; using Azure.Core; -using Azure.Core.Testing; +using Azure.Core.TestFramework; using Azure.Storage.Blobs; using Azure.Storage.Blobs.Models; using Azure.Storage.Test.Shared; From 
c629fed1d4df114007da2e39624bc67538b1f421 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Tue, 5 May 2020 18:19:52 -0700 Subject: [PATCH 09/30] Added factories to Chunk --- .../src/AvroReaderFactory.cs | 29 +++++ .../src/Chunk.cs | 19 +-- .../src/LazyLoadingBlobStream.cs | 7 +- .../src/LazyLoadingBlobStreamFactory.cs | 21 ++++ .../src/Shard.cs | 8 +- .../tests/BlobChangeFeedAsyncPagableTests.cs | 5 +- .../tests/ChunkTests.cs | 108 ++++++++++++++++-- .../src/Shared/StorageVersionExtensions.cs | 2 +- 8 files changed, 173 insertions(+), 26 deletions(-) create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AvroReaderFactory.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStreamFactory.cs diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AvroReaderFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AvroReaderFactory.cs new file mode 100644 index 0000000000000..13ba1f019e595 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/AvroReaderFactory.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System.IO; +using Azure.Storage.Internal.Avro; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + /// + /// Creates AvroReaders. Allows us to inject mock AvroReaders in + /// the Chunk unit tests. 
+ /// + internal class AvroReaderFactory + { + public virtual AvroReader BuildAvroReader(Stream dataStream) + => new AvroReader(dataStream); + + public virtual AvroReader BuildAvroReader( + Stream dataStream, + Stream headStream, + long blockOffset, + long eventIndex) + => new AvroReader( + dataStream, + headStream, + blockOffset, + eventIndex); + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs index bcaa87e474fc1..30b63170c40f5 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs @@ -49,6 +49,8 @@ internal class Chunk : IDisposable public Chunk( BlobContainerClient containerClient, + LazyLoadingBlobStreamFactory lazyLoadingBlobStreamFactory, + AvroReaderFactory avroReaderFactory, string chunkPath, long? blockOffset = default, long? eventIndex = default) @@ -57,7 +59,7 @@ public Chunk( BlockOffset = blockOffset ?? 0; EventIndex = eventIndex ?? 0; - _dataStream = new LazyLoadingBlobStream( + _dataStream = lazyLoadingBlobStreamFactory.BuildLazyLoadingBlobStream( _blobClient, offset: BlockOffset, blockSize: Constants.ChangeFeed.ChunkBlockDownloadSize); @@ -65,12 +67,12 @@ public Chunk( // We aren't starting from the beginning of the Chunk if (BlockOffset != 0) { - _headStream = new LazyLoadingBlobStream( + _headStream = lazyLoadingBlobStreamFactory.BuildLazyLoadingBlobStream( _blobClient, offset: 0, blockSize: 3 * Constants.KB); - _avroReader = new AvroReader( + _avroReader = avroReaderFactory.BuildAvroReader( _dataStream, _headStream, BlockOffset, @@ -78,11 +80,10 @@ public Chunk( } else { - _avroReader = new AvroReader(_dataStream); + _avroReader = avroReaderFactory.BuildAvroReader(_dataStream); } } - //TODO what if the Segment isn't Finalized?? public virtual bool HasNext() => _avroReader.HasNext(); @@ -110,14 +111,6 @@ public void Dispose() GC.SuppressFinalize(this); } - /// - /// Constructor for testing. 
Do not use. - /// - internal Chunk(AvroReader avroReader) - { - _avroReader = avroReader; - } - /// /// Constructor for mocking. Do not use. /// diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs index 84ad361165d6f..c9134848dc074 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs @@ -58,6 +58,11 @@ public LazyLoadingBlobStream(BlobClient blobClient, long offset, long blockSize) _initalized = false; } + /// + /// Constructor for mocking. + /// + public LazyLoadingBlobStream() { } + /// public override int Read( byte[] buffer, @@ -239,4 +244,4 @@ public override void Write(byte[] buffer, int offset, int count) protected override void Dispose(bool disposing) => _stream.Dispose(); } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStreamFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStreamFactory.cs new file mode 100644 index 0000000000000..73fd0a94b896e --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStreamFactory.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +namespace Azure.Storage.Blobs.ChangeFeed +{ + /// + /// Creates LazyLoadingBlobStreams. Allows us to inject mock + /// LazyLoadingBlobStreams in the Chunk unit tests. 
+ /// + internal class LazyLoadingBlobStreamFactory + { + public virtual LazyLoadingBlobStream BuildLazyLoadingBlobStream( + BlobClient blobClient, + long offset, + long blockSize) + => new LazyLoadingBlobStream( + blobClient, + offset, + blockSize); + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs index 1125b35b62b9c..084749fa16ff8 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs @@ -110,6 +110,8 @@ private async Task Initalize(bool async) _currentChunk = new Chunk( _containerClient, + new LazyLoadingBlobStreamFactory(), + new AvroReaderFactory(), _chunks.Dequeue(), _blockOffset, _eventIndex); @@ -153,7 +155,11 @@ public virtual async Task Next( // Remove currentChunk if it doesn't have another event. if (!_currentChunk.HasNext() && _chunks.Count > 0) { - _currentChunk = new Chunk(_containerClient, _chunks.Dequeue()); + _currentChunk = new Chunk( + _containerClient, + new LazyLoadingBlobStreamFactory(), + new AvroReaderFactory(), + _chunks.Dequeue()); _chunkIndex++; } return changeFeedEvent; diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs index 9955c4fcf772c..17ffee3b7a3cb 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs @@ -5,6 +5,7 @@ using System.Collections.Generic; using System.Text; using System.Threading.Tasks; +using Azure.Core.TestFramework; using Azure.Storage.Blobs; using Azure.Storage.Blobs.ChangeFeed.Models; using NUnit.Framework; @@ -14,7 +15,7 @@ namespace Azure.Storage.Blobs.ChangeFeed.Tests public class BlobChangeFeedAsyncPagableTests : ChangeFeedTestBase { public BlobChangeFeedAsyncPagableTests(bool async) - : 
base(async, null /* RecordedTestMode.Record /* to re-record */) + : base(async, RecordedTestMode.Live /* RecordedTestMode.Record /* to re-record */) { } @@ -23,7 +24,7 @@ public BlobChangeFeedAsyncPagableTests(bool async) //TODO page size tests [Test] - [Ignore("")] + //[Ignore("")] public async Task Test() { BlobServiceClient service = GetServiceClient_SharedKey(); diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs index 1892a04fcacb4..1d7f327adfecd 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; +using System.IO; using System.Text; using System.Threading; using System.Threading.Tasks; @@ -25,15 +26,41 @@ public ChunkTests(bool async) public void HasNext_True() { // Arrange + string chunkPath = "chunkPath"; + Mock containerClient = new Mock(MockBehavior.Strict); + Mock blobClient = new Mock(MockBehavior.Strict); + Mock avroReaderFactory = new Mock(MockBehavior.Strict); Mock avroReader = new Mock(MockBehavior.Strict); + Mock lazyLoadingBlobStreamFactory = new Mock(MockBehavior.Strict); + Mock lazyLoadingBlobStream = new Mock(MockBehavior.Strict); + + containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); + lazyLoadingBlobStreamFactory.Setup(r => r.BuildLazyLoadingBlobStream( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(lazyLoadingBlobStream.Object); + avroReaderFactory.Setup(r => r.BuildAvroReader(It.IsAny())).Returns(avroReader.Object); avroReader.Setup(r => r.HasNext()).Returns(true); - Chunk chunk = new Chunk(avroReader.Object); + + Chunk chunk = new Chunk( + containerClient.Object, + lazyLoadingBlobStreamFactory.Object, + avroReaderFactory.Object, + chunkPath); // Act bool hasNext = chunk.HasNext(); // Assert Assert.IsTrue(hasNext); + + containerClient.Verify(r => 
r.GetBlobClient(chunkPath)); + lazyLoadingBlobStreamFactory.Verify(r => r.BuildLazyLoadingBlobStream( + blobClient.Object, + 0, + Constants.ChangeFeed.ChunkBlockDownloadSize)); + avroReaderFactory.Verify(r => r.BuildAvroReader(lazyLoadingBlobStream.Object)); avroReader.Verify(r => r.HasNext()); } @@ -41,15 +68,42 @@ public void HasNext_True() public void HasNext_False() { // Arrange + string chunkPath = "chunkPath"; + Mock containerClient = new Mock(MockBehavior.Strict); + Mock blobClient = new Mock(MockBehavior.Strict); + Mock avroReaderFactory = new Mock(MockBehavior.Strict); Mock avroReader = new Mock(MockBehavior.Strict); - avroReader.Setup(r => r.HasNext()).Returns(false); - Chunk chunk = new Chunk(avroReader.Object); + Mock lazyLoadingBlobStreamFactory = new Mock(MockBehavior.Strict); + Mock lazyLoadingBlobStream = new Mock(MockBehavior.Strict); + + containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); + lazyLoadingBlobStreamFactory.Setup(r => r.BuildLazyLoadingBlobStream( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(lazyLoadingBlobStream.Object); + avroReaderFactory.Setup(r => r.BuildAvroReader(It.IsAny())).Returns(avroReader.Object); + avroReader.Setup(r => r.HasNext()).Returns(false) + ; + + Chunk chunk = new Chunk( + containerClient.Object, + lazyLoadingBlobStreamFactory.Object, + avroReaderFactory.Object, + chunkPath); // Act bool hasNext = chunk.HasNext(); // Assert Assert.IsFalse(hasNext); + + containerClient.Verify(r => r.GetBlobClient(chunkPath)); + lazyLoadingBlobStreamFactory.Verify(r => r.BuildLazyLoadingBlobStream( + blobClient.Object, + 0, + Constants.ChangeFeed.ChunkBlockDownloadSize)); + avroReaderFactory.Verify(r => r.BuildAvroReader(lazyLoadingBlobStream.Object)); avroReader.Verify(r => r.HasNext()); } @@ -57,8 +111,9 @@ public void HasNext_False() public async Task Next() { // Arrange + string chunkPath = "chunkPath"; long blockOffset = 5; - long objectIndex = 10; + long eventIndex = 10; string topic = 
"topic"; string subject = "subject"; @@ -110,19 +165,42 @@ public async Task Next() } }; + Mock containerClient = new Mock(MockBehavior.Strict); + Mock blobClient = new Mock(MockBehavior.Strict); + Mock avroReaderFactory = new Mock(MockBehavior.Strict); Mock avroReader = new Mock(MockBehavior.Strict); - + Mock lazyLoadingBlobStreamFactory = new Mock(MockBehavior.Strict); + Mock dataStream = new Mock(MockBehavior.Strict); + Mock headStream = new Mock(MockBehavior.Strict); + + containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); + lazyLoadingBlobStreamFactory.SetupSequence(r => r.BuildLazyLoadingBlobStream( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(dataStream.Object) + .Returns(headStream.Object); + avroReaderFactory.Setup(r => r.BuildAvroReader( + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny())).Returns(avroReader.Object); avroReader.Setup(r => r.HasNext()).Returns(true); - avroReader.Setup(r => r.Next( It.IsAny(), It.IsAny())) .ReturnsAsync(record); avroReader.Setup(r => r.BlockOffset).Returns(blockOffset); - avroReader.Setup(r => r.ObjectIndex).Returns(objectIndex); + avroReader.Setup(r => r.ObjectIndex).Returns(eventIndex); - Chunk chunk = new Chunk(avroReader.Object); + Chunk chunk = new Chunk( + containerClient.Object, + lazyLoadingBlobStreamFactory.Object, + avroReaderFactory.Object, + chunkPath, + blockOffset, + eventIndex); // Act BlobChangeFeedEvent changeFeedEvent = await chunk.Next(IsAsync); @@ -150,6 +228,20 @@ public async Task Next() Assert.AreEqual(recursive, changeFeedEvent.EventData.Recursive); Assert.AreEqual(sequencer, changeFeedEvent.EventData.Sequencer); + containerClient.Verify(r => r.GetBlobClient(chunkPath)); + lazyLoadingBlobStreamFactory.Verify(r => r.BuildLazyLoadingBlobStream( + blobClient.Object, + blockOffset, + Constants.ChangeFeed.ChunkBlockDownloadSize)); + lazyLoadingBlobStreamFactory.Verify(r => r.BuildLazyLoadingBlobStream( + blobClient.Object, + 0, + 3 * Constants.KB)); + 
avroReaderFactory.Verify(r => r.BuildAvroReader( + dataStream.Object, + headStream.Object, + blockOffset, + eventIndex)); avroReader.Verify(r => r.HasNext()); avroReader.Verify(r => r.Next( IsAsync, diff --git a/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs b/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs index ff480ae550a65..9de33dc548600 100644 --- a/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs +++ b/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs @@ -51,7 +51,7 @@ public static string ToVersionString(this ServiceVersion version) => ServiceVersion.V2019_02_02 => "2019-02-02", ServiceVersion.V2019_07_07 => "2019-07-07", // TODO this is temporary until 73 goes to stage. - ServiceVersion.V2019_12_12 => "2019-12-12", + ServiceVersion.V2019_12_12 => "2019-10-10", #elif QueueSDK // Queues just bumped the version number without changing the swagger ServiceVersion.V2019_02_02 => "2018-11-09", From 60fc6490a88aa6555cf2b914de3333477e31ce06 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Tue, 5 May 2020 19:33:42 -0700 Subject: [PATCH 10/30] Added factories to Shard --- .../src/ChunkFactory.cs | 41 ++ .../src/Segment.cs | 8 +- .../src/Shard.cs | 132 +---- .../src/ShardFactory.cs | 86 ++++ .../tests/ChangeFeedTests.cs | 4 +- .../tests/ChunkTests.cs | 24 +- .../tests/ShardTests.cs | 467 ++++++++++++++++-- 7 files changed, 592 insertions(+), 170 deletions(-) create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs new file mode 100644 index 0000000000000..e51c79b22ed2b --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + internal class ChunkFactory + { + private readonly LazyLoadingBlobStreamFactory _lazyLoadingBlobStreamFactory; + private readonly AvroReaderFactory _avroReaderFactory; + + public ChunkFactory( + LazyLoadingBlobStreamFactory lazyLoadingBlobStreamFactory, + AvroReaderFactory avroReaderFactory) + { + _lazyLoadingBlobStreamFactory = lazyLoadingBlobStreamFactory; + _avroReaderFactory = avroReaderFactory; + } + + public virtual Chunk BuildChunk( + BlobContainerClient containerClient, + string chunkPath, + long? blockOffset = default, + long? eventIndex = default) + => new Chunk( + containerClient, + _lazyLoadingBlobStreamFactory, + _avroReaderFactory, + chunkPath, + blockOffset, + eventIndex); + + /// + /// Constructor for mocking. + /// + public ChunkFactory() { } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs index 9226e4523ad3d..e5c145f80dbf1 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs @@ -102,7 +102,13 @@ private async Task Initalize(bool async) { //TODO cleanup this line string shardPath = shardJsonElement.ToString().Substring("$blobchangefeed/".Length); - Shard shard = new Shard(_containerClient, shardPath, _cursor?.ShardCursors?[i]); + ShardFactory shardFactory = new ShardFactory(new ChunkFactory(new LazyLoadingBlobStreamFactory(), new AvroReaderFactory())); + Shard shard = await shardFactory.BuildShard( + async, + _containerClient, + shardPath, + _cursor?.ShardCursors?[i]) + .ConfigureAwait(false); _shards.Add(shard); i++; } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs index 084749fa16ff8..fc821c24adcbe 100644 --- 
a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs @@ -18,9 +18,9 @@ internal class Shard : IDisposable private readonly BlobContainerClient _containerClient; /// - /// The path to this Shard. + /// ChunkFactory. /// - private readonly string _shardPath; + private readonly ChunkFactory _chunkFactory; /// /// Queue of the paths to Chunks we haven't processed. @@ -38,111 +38,27 @@ internal class Shard : IDisposable private long _chunkIndex; /// - /// The byte offset of the beginning of the - /// current Avro block. Only used to initalize a - /// Shard from a Sursor. + /// Gets the for this Shard. /// - private readonly long _blockOffset; - - /// - /// Index of the current event within the - /// Avro block. Only used to initalize a - /// Shard from a Sursor. - /// - private readonly long _eventIndex; - - /// - /// If this Shard has been initalized. - /// - private bool _isInitialized; - - public Shard( - BlobContainerClient containerClient, - string shardPath, - ShardCursor shardCursor = default) - { - _containerClient = containerClient; - _shardPath = shardPath; - _chunks = new Queue(); - _isInitialized = false; - _chunkIndex = shardCursor?.ChunkIndex ?? 0; - _blockOffset = shardCursor?.BlockOffset ?? 0; - _eventIndex = shardCursor?.EventIndex ?? 
0; - } - - private async Task Initalize(bool async) - { - // Get Chunks - if (async) - { - await foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchyAsync( - prefix: _shardPath).ConfigureAwait(false)) - { - if (blobHierarchyItem.IsPrefix) - continue; - - //Chunk chunk = new Chunk(_containerClient, blobHierarchyItem.Blob.Name); - _chunks.Enqueue(blobHierarchyItem.Blob.Name); - } - } - else - { - foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchy( - prefix: _shardPath)) - { - if (blobHierarchyItem.IsPrefix) - continue; - - //Chunk chunk = new Chunk(_containerClient, blobHierarchyItem.Blob.Name); - _chunks.Enqueue(blobHierarchyItem.Blob.Name); - } - } - - // Fast forward to current Chunk - if (_chunkIndex > 0) - { - //TODO possible off by 1 error here. - for (int i = 0; i < _chunkIndex; i++) - { - _chunks.Dequeue(); - } - } - - _currentChunk = new Chunk( - _containerClient, - new LazyLoadingBlobStreamFactory(), - new AvroReaderFactory(), - _chunks.Dequeue(), - _blockOffset, - _eventIndex); - _isInitialized = true; - } - public virtual ShardCursor GetCursor() => new ShardCursor( _chunkIndex, _currentChunk.BlockOffset, _currentChunk.EventIndex); + /// + /// If this Shard has a next event. + /// public virtual bool HasNext() - { - if (!_isInitialized) - { - return true; - } - - return _chunks.Count > 0 || _currentChunk.HasNext(); - } + => _chunks.Count > 0 || _currentChunk.HasNext(); + /// + /// Gets the next . + /// public virtual async Task Next( bool async, CancellationToken cancellationToken = default) { - if (!_isInitialized) - { - await Initalize(async).ConfigureAwait(false); - } - if (!HasNext()) { throw new InvalidOperationException("Shard doesn't have any more events"); @@ -155,10 +71,8 @@ public virtual async Task Next( // Remove currentChunk if it doesn't have another event. 
if (!_currentChunk.HasNext() && _chunks.Count > 0) { - _currentChunk = new Chunk( + _currentChunk = _chunkFactory.BuildChunk( _containerClient, - new LazyLoadingBlobStreamFactory(), - new AvroReaderFactory(), _chunks.Dequeue()); _chunkIndex++; } @@ -169,24 +83,24 @@ public virtual async Task Next( public void Dispose() => _currentChunk.Dispose(); /// - /// Constructor for testing. Do not use. + /// Constructor for use by . /// - internal Shard( - Chunk chunk = default, - long chunkIndex = default, - bool isInitalized = default, - Queue chunks = default, - BlobContainerClient containerClient = default) + public Shard( + BlobContainerClient containerClient, + ChunkFactory chunkFactory, + Queue chunks, + Chunk currentChunk, + long chunkIndex) { - _currentChunk = chunk; - _chunkIndex = chunkIndex; - _isInitialized = isInitalized; - _chunks = chunks; _containerClient = containerClient; + _chunkFactory = chunkFactory; + _chunks = chunks; + _currentChunk = currentChunk; + _chunkIndex = chunkIndex; } /// - /// Constructor for mocking. Do not use. + /// Constructor for mocking. /// internal Shard() { } } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs new file mode 100644 index 0000000000000..38a2f5ff7566f --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs @@ -0,0 +1,86 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System.Collections.Generic; +using System.Threading.Tasks; +using Azure.Storage.Blobs.ChangeFeed.Models; +using Azure.Storage.Blobs.Models; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + /// + /// Builds a Shard. + /// + internal class ShardFactory + { + private readonly ChunkFactory _chunkFactory; + + public ShardFactory(ChunkFactory chunkFactory) + { + _chunkFactory = chunkFactory; + } + +#pragma warning disable CA1822 // Does not acces instance data can be marked static. 
+ public async Task BuildShard( +#pragma warning restore CA1822 // Can't mock static methods in MOQ. + bool async, + BlobContainerClient containerClient, + string shardPath, + ShardCursor shardCursor = default) + { + // Models we'll need later + Queue chunks = new Queue(); + long chunkIndex = shardCursor?.ChunkIndex ?? 0; + long blockOffset = shardCursor?.BlockOffset ?? 0; + long eventIndex = shardCursor?.EventIndex ?? 0; + + // Get Chunks + if (async) + { + await foreach (BlobHierarchyItem blobHierarchyItem in containerClient.GetBlobsByHierarchyAsync( + prefix: shardPath).ConfigureAwait(false)) + { + if (blobHierarchyItem.IsPrefix) + continue; + + //Chunk chunk = new Chunk(_containerClient, blobHierarchyItem.Blob.Name); + chunks.Enqueue(blobHierarchyItem.Blob.Name); + } + } + else + { + foreach (BlobHierarchyItem blobHierarchyItem in containerClient.GetBlobsByHierarchy( + prefix: shardPath)) + { + if (blobHierarchyItem.IsPrefix) + continue; + + //Chunk chunk = new Chunk(_containerClient, blobHierarchyItem.Blob.Name); + chunks.Enqueue(blobHierarchyItem.Blob.Name); + } + } + + // Fast forward to current Chunk + if (chunkIndex > 0) + { + for (int i = 0; i < chunkIndex; i++) + { + chunks.Dequeue(); + } + } + + Chunk currentChunk = _chunkFactory.BuildChunk( + containerClient, + chunks.Dequeue(), + blockOffset, + eventIndex); + + return new Shard( + containerClient, + _chunkFactory, + chunks, + currentChunk, + chunkIndex); + } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs index 2ffe1b67aaedf..c7b18c451bb2f 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs @@ -88,7 +88,7 @@ public async Task GetSegmentsInYearTest() if (IsAsync) { - AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYearFuncAsync); + AsyncPageable 
asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYearFuncAsync); containerClient.Setup(r => r.GetBlobsByHierarchyAsync( default, @@ -100,7 +100,7 @@ public async Task GetSegmentsInYearTest() else { Pageable pageable = - PageResponseEnumerator.CreateEnumerable(GetSegmentsInYearFunc); + PageResponseEnumerator.CreateEnumerable(GetSegmentsInYearFunc); containerClient.Setup(r => r.GetBlobsByHierarchy( default, diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs index 1d7f327adfecd..d3c178895fb49 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs @@ -43,10 +43,11 @@ public void HasNext_True() avroReaderFactory.Setup(r => r.BuildAvroReader(It.IsAny())).Returns(avroReader.Object); avroReader.Setup(r => r.HasNext()).Returns(true); - Chunk chunk = new Chunk( - containerClient.Object, + ChunkFactory chunkFactory = new ChunkFactory( lazyLoadingBlobStreamFactory.Object, - avroReaderFactory.Object, + avroReaderFactory.Object); + Chunk chunk = chunkFactory.BuildChunk( + containerClient.Object, chunkPath); // Act @@ -83,13 +84,13 @@ public void HasNext_False() It.IsAny())) .Returns(lazyLoadingBlobStream.Object); avroReaderFactory.Setup(r => r.BuildAvroReader(It.IsAny())).Returns(avroReader.Object); - avroReader.Setup(r => r.HasNext()).Returns(false) - ; + avroReader.Setup(r => r.HasNext()).Returns(false); - Chunk chunk = new Chunk( - containerClient.Object, + ChunkFactory chunkFactory = new ChunkFactory( lazyLoadingBlobStreamFactory.Object, - avroReaderFactory.Object, + avroReaderFactory.Object); + Chunk chunk = chunkFactory.BuildChunk( + containerClient.Object, chunkPath); // Act @@ -194,10 +195,11 @@ public async Task Next() avroReader.Setup(r => r.BlockOffset).Returns(blockOffset); avroReader.Setup(r => r.ObjectIndex).Returns(eventIndex); - Chunk chunk = new Chunk( - 
containerClient.Object, + ChunkFactory chunkFactory = new ChunkFactory( lazyLoadingBlobStreamFactory.Object, - avroReaderFactory.Object, + avroReaderFactory.Object); + Chunk chunk = chunkFactory.BuildChunk( + containerClient.Object, chunkPath, blockOffset, eventIndex); diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs index 83b6d1f0d209d..1808b7d73a899 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs @@ -21,21 +21,67 @@ public ShardTests(bool async) } [Test] - public void GetCursor() + public async Task GetCursor() { // Arrange - long chunkIndex = 5; + string shardPath = "shardPath"; + long chunkIndex = 2; long blockOffset = 100; long eventIndex = 200; + ShardCursor shardCursor = new ShardCursor( + chunkIndex, + blockOffset, + eventIndex); + + Mock containerClient = new Mock(MockBehavior.Strict); + Mock chunkFactory = new Mock(MockBehavior.Strict); Mock chunk = new Mock(MockBehavior.Strict); + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetChunkPagesFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + It.IsAny(), + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = PageResponseEnumerator.CreateEnumerable(GetChunkPagesFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + default, + It.IsAny(), + default)).Returns(pageable); + } + + chunkFactory.Setup(r => r.BuildChunk( + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(chunk.Object); + chunk.Setup(r => r.BlockOffset).Returns(blockOffset); chunk.Setup(r => r.EventIndex).Returns(eventIndex); - Shard shard = new Shard(chunk.Object, chunkIndex); + ShardFactory shardFactory = new ShardFactory(chunkFactory.Object); // Act + Shard shard = await shardFactory.BuildShard( + 
IsAsync, + containerClient.Object, + shardPath, + shardCursor) + .ConfigureAwait(false); + + ShardCursor cursor = shard.GetCursor(); // Assert @@ -43,80 +89,307 @@ public void GetCursor() Assert.AreEqual(blockOffset, cursor.BlockOffset); Assert.AreEqual(eventIndex, cursor.EventIndex); + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + shardPath, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + shardPath, + default)); + } + + chunkFactory.Verify(r => r.BuildChunk( + containerClient.Object, + "chunk2", + blockOffset, + eventIndex)); + chunk.Verify(r => r.BlockOffset); chunk.Verify(r => r.EventIndex); } [Test] - public void HasNext_NotInitalizes() + public async Task HasNext_False() { // Arrange - Shard shard = new Shard(isInitalized: false); - - // Act - bool hasNext = shard.HasNext(); + string shardPath = "shardPath"; + long chunkIndex = 4; + long blockOffset = 100; + long eventIndex = 200; - // Assert - Assert.IsTrue(hasNext); - } + ShardCursor shardCursor = new ShardCursor( + chunkIndex, + blockOffset, + eventIndex); - [Test] - public void HasNext_False() - { - // Arrange + Mock containerClient = new Mock(MockBehavior.Strict); + Mock chunkFactory = new Mock(MockBehavior.Strict); Mock chunk = new Mock(MockBehavior.Strict); + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetChunkPagesFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + It.IsAny(), + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = PageResponseEnumerator.CreateEnumerable(GetChunkPagesFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + default, + It.IsAny(), + default)).Returns(pageable); + } + + chunkFactory.Setup(r => r.BuildChunk( + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(chunk.Object); + 
chunk.Setup(r => r.HasNext()).Returns(false); - Queue chunks = new Queue(); - Shard shard = new Shard( - chunk.Object, - isInitalized: true, - chunks: chunks); + ShardFactory shardFactory = new ShardFactory(chunkFactory.Object); // Act + Shard shard = await shardFactory.BuildShard( + IsAsync, + containerClient.Object, + shardPath, + shardCursor) + .ConfigureAwait(false); + bool hasNext = shard.HasNext(); // Assert Assert.IsFalse(hasNext); + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + shardPath, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + shardPath, + default)); + } + + chunkFactory.Verify(r => r.BuildChunk( + containerClient.Object, + "chunk4", + blockOffset, + eventIndex)); + chunk.Verify(r => r.HasNext()); } [Test] - public void HasNext_ChunksLeft() + public async Task HasNext_ChunksLeft() { // Arrange - Queue chunks = new Queue(); - chunks.Enqueue("chunk"); - Shard shard = new Shard( - isInitalized: true, - chunks: chunks); + string shardPath = "shardPath"; + long chunkIndex = 2; + long blockOffset = 100; + long eventIndex = 200; + + ShardCursor shardCursor = new ShardCursor( + chunkIndex, + blockOffset, + eventIndex); + + Mock containerClient = new Mock(MockBehavior.Strict); + Mock chunkFactory = new Mock(MockBehavior.Strict); + Mock chunk = new Mock(MockBehavior.Strict); + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetChunkPagesFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + It.IsAny(), + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = PageResponseEnumerator.CreateEnumerable(GetChunkPagesFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + default, + It.IsAny(), + default)).Returns(pageable); + } + + chunkFactory.Setup(r => r.BuildChunk( + It.IsAny(), + 
It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(chunk.Object); + + ShardFactory shardFactory = new ShardFactory(chunkFactory.Object); // Act + Shard shard = await shardFactory.BuildShard( + IsAsync, + containerClient.Object, + shardPath, + shardCursor) + .ConfigureAwait(false); + bool hasNext = shard.HasNext(); // Assert Assert.IsTrue(hasNext); + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + shardPath, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + shardPath, + default)); + } + + chunkFactory.Verify(r => r.BuildChunk( + containerClient.Object, + "chunk2", + blockOffset, + eventIndex)); } [Test] - public void HasNext_CurrentChunkHasNext() + public async Task HasNext_CurrentChunkHasNext() { // Arrange + string shardPath = "shardPath"; + long chunkIndex = 4; + long blockOffset = 100; + long eventIndex = 200; + + ShardCursor shardCursor = new ShardCursor( + chunkIndex, + blockOffset, + eventIndex); + + Mock containerClient = new Mock(MockBehavior.Strict); + Mock chunkFactory = new Mock(MockBehavior.Strict); Mock chunk = new Mock(MockBehavior.Strict); + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetChunkPagesFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + It.IsAny(), + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = PageResponseEnumerator.CreateEnumerable(GetChunkPagesFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + default, + It.IsAny(), + default)).Returns(pageable); + } + + chunkFactory.Setup(r => r.BuildChunk( + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(chunk.Object); + chunk.Setup(r => r.HasNext()).Returns(true); - Shard shard = new Shard( - chunk: chunk.Object, - isInitalized: true, - chunks: new Queue()); + ShardFactory shardFactory = 
new ShardFactory(chunkFactory.Object); // Act + Shard shard = await shardFactory.BuildShard( + IsAsync, + containerClient.Object, + shardPath, + shardCursor) + .ConfigureAwait(false); + bool hasNext = shard.HasNext(); // Assert Assert.IsTrue(hasNext); + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + shardPath, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + shardPath, + default)); + } + + chunkFactory.Verify(r => r.BuildChunk( + containerClient.Object, + "chunk4", + blockOffset, + eventIndex)); + chunk.Verify(r => r.HasNext()); } @@ -129,37 +402,137 @@ public async Task Next() { Id = eventId }; - string secondChunkName = "chunk"; - Mock chunk = new Mock(MockBehavior.Strict); - chunk.Setup(r => r.HasNext()).Returns(true); - chunk.Setup(r => r.Next(It.IsAny(), default)).Returns(Task.FromResult(expectedChangeFeedEvent)); - chunk.Setup(r => r.HasNext()).Returns(false); + string shardPath = "shardPath"; + long chunkIndex = 2; + long blockOffset = 100; + long eventIndex = 200; + + ShardCursor shardCursor = new ShardCursor( + chunkIndex, + blockOffset, + eventIndex); Mock containerClient = new Mock(MockBehavior.Strict); - containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns((new Mock()).Object); + Mock chunkFactory = new Mock(MockBehavior.Strict); + Mock chunk = new Mock(MockBehavior.Strict); + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetChunkPagesFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + It.IsAny(), + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = PageResponseEnumerator.CreateEnumerable(GetChunkPagesFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + default, + It.IsAny(), + default)).Returns(pageable); + } + + chunkFactory.Setup(r => r.BuildChunk( + 
It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(chunk.Object); + + chunk.Setup(r => r.Next( + It.IsAny(), + default)) + .Returns(Task.FromResult(expectedChangeFeedEvent)); + + chunk.SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(true); + + chunk.Setup(r => r.BlockOffset).Returns(blockOffset); + chunk.Setup(r => r.EventIndex).Returns(eventIndex); - Queue chunks = new Queue(); - chunks.Enqueue(secondChunkName); - Shard shard = new Shard( - chunk: chunk.Object, - isInitalized: true, - chunks: chunks, - containerClient: containerClient.Object); + ShardFactory shardFactory = new ShardFactory(chunkFactory.Object); // Act + Shard shard = await shardFactory.BuildShard( + IsAsync, + containerClient.Object, + shardPath, + shardCursor) + .ConfigureAwait(false); + BlobChangeFeedEvent changeFeedEvent = await shard.Next(IsAsync); ShardCursor cursor = shard.GetCursor(); // Assert Assert.AreEqual(eventId, changeFeedEvent.Id); - Assert.AreEqual(1, cursor.ChunkIndex); + Assert.AreEqual(2, cursor.ChunkIndex); + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + shardPath, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + shardPath, + default)); + } + + chunkFactory.Verify(r => r.BuildChunk( + containerClient.Object, + "chunk2", + blockOffset, + eventIndex)); chunk.Verify(r => r.HasNext()); chunk.Verify(r => r.Next(IsAsync, default)); chunk.Verify(r => r.HasNext()); - - containerClient.Verify(r => r.GetBlobClient(secondChunkName)); + chunk.Verify(r => r.BlockOffset); + chunk.Verify(r => r.EventIndex); } + + private static Task> GetChunkPagesFuncAsync( + string continuation, + int? pageSizeHint) + => Task.FromResult(GetChunkPagesFunc(continuation, pageSizeHint)); + + private static Page GetChunkPagesFunc( + string continuation, + int? 
pageSizeHint) + => new BlobHierarchyItemPage(new List + { + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("chunk0", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("chunk1", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("chunk2", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("chunk3", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("chunk4", false, null)), + }); } } From 39f5bc2c90b1802185f46630f356cf5643117394 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Tue, 5 May 2020 21:01:46 -0700 Subject: [PATCH 11/30] Most the way through adding Factories to Segment --- .../src/ChangeFeed.cs | 21 +- .../src/Segment.cs | 107 +----- .../src/SegmentFactory.cs | 94 ++++++ .../src/ShardFactory.cs | 7 +- ...zure.Storage.Blobs.ChangeFeed.Tests.csproj | 5 + .../tests/Resources/SegmentManifest.json | 28 ++ .../tests/SegmentTests.cs | 314 +++++++++++------- 7 files changed, 361 insertions(+), 215 deletions(-) create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/SegmentManifest.json diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs index d5c351a55c64d..b638cf5b0ab69 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs @@ -62,6 +62,8 @@ internal class ChangeFeed /// private bool _isInitalized; + private readonly SegmentFactory _segmentFactory; + // Start time will be rounded down to the nearest hour. 
public ChangeFeed( BlobServiceClient blobServiceClient, @@ -74,6 +76,7 @@ public ChangeFeed( _isInitalized = false; _startTime = startTime.RoundDownToNearestHour(); _endTime = endTime.RoundUpToNearestHour(); + _segmentFactory = new SegmentFactory(new ShardFactory(new ChunkFactory(new LazyLoadingBlobStreamFactory(), new AvroReaderFactory()))); } public ChangeFeed( @@ -89,6 +92,7 @@ public ChangeFeed( _startTime = cursor.CurrentSegmentCursor.SegmentTime; _endTime = cursor.EndTime; _currentSegmentCursor = cursor.CurrentSegmentCursor; + _segmentFactory = new SegmentFactory(new ShardFactory(new ChunkFactory(new LazyLoadingBlobStreamFactory(), new AvroReaderFactory()))); } /// @@ -174,10 +178,12 @@ private async Task Initalize(bool async) endTime: MinDateTime(_lastConsumable, _endTime)) .ConfigureAwait(false); - _currentSegment = new Segment( + _currentSegment = await _segmentFactory.BuildSegment( + async, _containerClient, _segments.Dequeue(), - _currentSegmentCursor); + _currentSegmentCursor) + .ConfigureAwait(false); _isInitalized = true; } @@ -312,7 +318,10 @@ private async Task AdvanceSegmentIfNecessary(bool async) // If the current segment is completed, remove it if (!_currentSegment.HasNext() && _segments.Count > 0) { - _currentSegment = new Segment(_containerClient, _segments.Dequeue()); + _currentSegment = await _segmentFactory.BuildSegment( + async, + _containerClient, + _segments.Dequeue()).ConfigureAwait(false); } // If _segments is empty, refill it @@ -330,7 +339,11 @@ private async Task AdvanceSegmentIfNecessary(bool async) if (_segments.Count > 0) { - _currentSegment = new Segment(_containerClient, _segments.Dequeue()); + _currentSegment = await _segmentFactory.BuildSegment( + async, + _containerClient, + _segments.Dequeue()) + .ConfigureAwait(false); } } } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs index e5c145f80dbf1..a923334ba0e20 100644 --- 
a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs @@ -3,12 +3,9 @@ using System; using System.Collections.Generic; -using System.Text; using System.Text.Json; using System.Threading; using System.Threading.Tasks; -using Azure.Core.Pipeline; -using Azure.Storage.Blobs; using Azure.Storage.Blobs.ChangeFeed.Models; using Azure.Storage.Blobs.Models; @@ -31,11 +28,6 @@ internal class Segment /// private readonly BlobContainerClient _containerClient; - /// - /// The path to the manifest for this Segment. - /// - private readonly string _manifestPath; - /// /// The Shards associated with this Segment. /// @@ -46,73 +38,18 @@ internal class Segment /// private int _shardIndex; - /// - /// If this Segement has been initalized. - /// - private bool _isInitalized; - - private SegmentCursor _cursor; - public Segment( BlobContainerClient containerClient, - string manifestPath, - SegmentCursor cursor = default) + List shards, + int shardIndex, + DateTimeOffset dateTime, + bool finalized) { _containerClient = containerClient; - _manifestPath = manifestPath; - DateTime = manifestPath.ToDateTimeOffset().Value; - _shards = new List(); - _cursor = cursor; - _shardIndex = cursor?.ShardIndex ?? 
0; - } - - private async Task Initalize(bool async) - { - // Download segment manifest - BlobClient blobClient = _containerClient.GetBlobClient(_manifestPath); - BlobDownloadInfo blobDownloadInfo; - - if (async) - { - blobDownloadInfo = await blobClient.DownloadAsync().ConfigureAwait(false); - } - else - { - blobDownloadInfo = blobClient.Download(); - } - - // Parse segment manifest - JsonDocument jsonManifest; - - if (async) - { - jsonManifest = await JsonDocument.ParseAsync(blobDownloadInfo.Content).ConfigureAwait(false); - } - else - { - jsonManifest = JsonDocument.Parse(blobDownloadInfo.Content); - } - - // Initalized Finalized field - string statusString = jsonManifest.RootElement.GetProperty("status").GetString(); - Finalized = statusString == "Finalized"; - - int i = 0; - foreach (JsonElement shardJsonElement in jsonManifest.RootElement.GetProperty("chunkFilePaths").EnumerateArray()) - { - //TODO cleanup this line - string shardPath = shardJsonElement.ToString().Substring("$blobchangefeed/".Length); - ShardFactory shardFactory = new ShardFactory(new ChunkFactory(new LazyLoadingBlobStreamFactory(), new AvroReaderFactory())); - Shard shard = await shardFactory.BuildShard( - async, - _containerClient, - shardPath, - _cursor?.ShardCursors?[i]) - .ConfigureAwait(false); - _shards.Add(shard); - i++; - } - _isInitalized = true; + _shards = shards; + _shardIndex = shardIndex; + DateTime = dateTime; + Finalized = finalized; } public SegmentCursor GetCursor() @@ -135,11 +72,6 @@ public async Task> GetPage( { List changeFeedEventList = new List(); - if (!_isInitalized) - { - await Initalize(async).ConfigureAwait(false); - } - if (!HasNext()) { throw new InvalidOperationException("Segment doesn't have any more events"); @@ -174,28 +106,11 @@ public async Task> GetPage( //TODO figure out if this is right. public bool HasNext() - { - if (!_isInitalized) - { - return true; - } - - return _shards.Count > 0; - } + => _shards.Count > 0; /// - /// Constructor for testing. 
Do not use. + /// Constructor for mocking. /// - internal Segment( - bool isInitalized = default, - List shards = default, - int shardIndex = default, - DateTimeOffset dateTime = default) - { - _isInitalized = isInitalized; - _shards = shards; - _shardIndex = shardIndex; - DateTime = dateTime; - } + public Segment() { } } } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs new file mode 100644 index 0000000000000..93d08e9462a3c --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Storage.Blobs.ChangeFeed.Models; +using Azure.Storage.Blobs.Models; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + internal class SegmentFactory + { + private readonly ShardFactory _shardFactory; + + /// + /// Constructor for mocking. + /// + public SegmentFactory() { } + + public SegmentFactory(ShardFactory shardFactory) + { + _shardFactory = shardFactory; + } + +#pragma warning disable CA1822 // Does not acces instance data can be marked static. + public virtual async Task BuildSegment( +#pragma warning restore CA1822 // Can't mock static methods in MOQ. + bool async, + BlobContainerClient containerClient, + string manifestPath, + SegmentCursor cursor = default) + { + // Models we need for later + List shards = new List(); + DateTimeOffset dateTime = manifestPath.ToDateTimeOffset().Value; + int shardIndex = cursor?.ShardIndex ?? 
0; + + // Download segment manifest + BlobClient blobClient = containerClient.GetBlobClient(manifestPath); + BlobDownloadInfo blobDownloadInfo; + + if (async) + { + blobDownloadInfo = await blobClient.DownloadAsync().ConfigureAwait(false); + } + else + { + blobDownloadInfo = blobClient.Download(); + } + + // Parse segment manifest + JsonDocument jsonManifest; + + if (async) + { + jsonManifest = await JsonDocument.ParseAsync(blobDownloadInfo.Content).ConfigureAwait(false); + } + else + { + jsonManifest = JsonDocument.Parse(blobDownloadInfo.Content); + } + + // Initalized Finalized field + string statusString = jsonManifest.RootElement.GetProperty("status").GetString(); + bool finalized = statusString == "Finalized"; + + int i = 0; + foreach (JsonElement shardJsonElement in jsonManifest.RootElement.GetProperty("chunkFilePaths").EnumerateArray()) + { + //TODO cleanup this line + string shardPath = shardJsonElement.ToString().Substring("$blobchangefeed/".Length); + Shard shard = await _shardFactory.BuildShard( + async, + containerClient, + shardPath, + cursor?.ShardCursors?[i]) + .ConfigureAwait(false); + + shards.Add(shard); + i++; + } + + return new Segment( + containerClient, + shards, + shardIndex, + dateTime, + finalized); + } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs index 38a2f5ff7566f..87c0df08f079c 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs @@ -20,8 +20,13 @@ public ShardFactory(ChunkFactory chunkFactory) _chunkFactory = chunkFactory; } + /// + /// Constructor for mocking. + /// + public ShardFactory() { } + #pragma warning disable CA1822 // Does not acces instance data can be marked static. - public async Task BuildShard( + public virtual async Task BuildShard( #pragma warning restore CA1822 // Can't mock static methods in MOQ. 
bool async, BlobContainerClient containerClient, diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Azure.Storage.Blobs.ChangeFeed.Tests.csproj b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Azure.Storage.Blobs.ChangeFeed.Tests.csproj index c4a3559bf805b..eb2fd04f30efd 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Azure.Storage.Blobs.ChangeFeed.Tests.csproj +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Azure.Storage.Blobs.ChangeFeed.Tests.csproj @@ -17,4 +17,9 @@ PreserveNewest + + + PreserveNewest + + \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/SegmentManifest.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/SegmentManifest.json new file mode 100644 index 0000000000000..7c200036f7a06 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/SegmentManifest.json @@ -0,0 +1,28 @@ +{ + "version": 0, + "begin": "2020-03-25T02:00:00.000Z", + "intervalSecs": 3600, + "status": "Finalized", + "config": { + "version": 0, + "configVersionEtag": "0x8d7d063fb40542c", + "numShards": 1, + "recordsFormat": "avro", + "formatSchemaVersion": 3, + "shardDistFnVersion": 1 + }, + "chunkFilePaths": [ + "$blobchangefeed/log/00/2020/03/25/0200/", + "$blobchangefeed/log/01/2020/03/25/0200/", + "$blobchangefeed/log/02/2020/03/25/0200/", + "$blobchangefeed/log/03/2020/03/25/0200/", + "$blobchangefeed/log/04/2020/03/25/0200/" + ], + "storageDiagnostics": { + "version": 0, + "lastModifiedTime": "2020-03-25T02:26:53.186Z", + "data": { + "aid": "61410c64-2006-0001-004c-02cde706e9dc" + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs index 85a978ed27ab8..7a05ed616f3db 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs @@ -3,8 +3,11 @@ 
using System; using System.Collections.Generic; +using System.IO; using System.Threading.Tasks; +using Azure.Core; using Azure.Storage.Blobs.ChangeFeed.Models; +using Azure.Storage.Blobs.Models; using Moq; using NUnit.Framework; @@ -18,61 +21,109 @@ public SegmentTests(bool async) } [Test] - public void HasNext_NotInitalized() + public async Task GetCursor() { // Arrange - Segment segment = new Segment(isInitalized: false); + string manifestPath = "idx/segments/2020/03/25/0200/meta.json"; - // Act - bool hasNext = segment.HasNext(); + Mock containerClient = new Mock(MockBehavior.Strict); + Mock blobClient = new Mock(MockBehavior.Strict); + Mock shardFactory = new Mock(MockBehavior.Strict); - // Assert - Assert.IsTrue(hasNext); - } + List> shards = new List>(); + for (int i = 0; i < 5; i++) + { + shards.Add(new Mock(MockBehavior.Strict)); + } - [Test] - public void HasNext_False() - { - // Arrange - List shards = new List(); - Segment segment = new Segment( - isInitalized: true, - shards: shards); + List shardCursors = new List + { + new ShardCursor(1, 2, 3), + new ShardCursor(4, 5, 6), + new ShardCursor(7, 8, 9), + new ShardCursor(10, 11, 12), + new ShardCursor(13, 14, 15) + }; - // Act - bool hasNext = segment.HasNext(); + DateTimeOffset dateTime = new DateTimeOffset(2020, 3, 25, 2, 0, 0, TimeSpan.Zero); + int shardIndex = 4; - // Assert - Assert.IsFalse(hasNext); - } + SegmentCursor expectedCursor = new SegmentCursor( + dateTime, + shardCursors, + shardIndex); - [Test] - public void GetCursor() - { - // Arrange - DateTimeOffset dateTime = DateTimeOffset.UtcNow; - int shardIndex = 4; - Mock shard = new Mock(MockBehavior.Strict); - Mock shardCursor = new Mock(MockBehavior.Strict); - shard.Setup(r => r.GetCursor()).Returns(shardCursor.Object); - List shards = new List + containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); + + using FileStream stream = File.OpenRead($"Resources{Path.DirectorySeparatorChar}{"SegmentManifest.json"}"); + 
BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); + Response downloadResponse = Response.FromValue(blobDownloadInfo, new ResponseImplementation()); + + if (IsAsync) { - shard.Object - }; - Segment segment = new Segment( - isInitalized: true, - shards: shards, - shardIndex: shardIndex, - dateTime: dateTime); + blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse); + } + else + { + blobClient.Setup(r => r.Download()).Returns(downloadResponse); + } + + shardFactory.SetupSequence(r => r.BuildShard( + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny())) + .ReturnsAsync(shards[0].Object) + .ReturnsAsync(shards[1].Object) + .ReturnsAsync(shards[2].Object) + .ReturnsAsync(shards[3].Object) + .ReturnsAsync(shards[4].Object); + + for (int i = 0; i < shards.Count; i++) + { + shards[i].Setup(r => r.GetCursor()).Returns(shardCursors[i]); + } + + SegmentFactory segmentFactory = new SegmentFactory(shardFactory.Object); + Segment segment = await segmentFactory.BuildSegment( + IsAsync, + containerClient.Object, + manifestPath, + expectedCursor); // Act SegmentCursor cursor = segment.GetCursor(); // Assert - Assert.AreEqual(dateTime, cursor.SegmentTime); - Assert.AreEqual(1, cursor.ShardCursors.Count); - Assert.AreEqual(shardCursor.Object, cursor.ShardCursors[0]); + Assert.AreEqual(expectedCursor.SegmentTime, cursor.SegmentTime); + Assert.AreEqual(expectedCursor.ShardCursors.Count, cursor.ShardCursors.Count); + for (int i = 0; i < expectedCursor.ShardCursors.Count; i++) + { + Assert.AreEqual(expectedCursor.ShardCursors[i].BlockOffset, cursor.ShardCursors[i].BlockOffset); + Assert.AreEqual(expectedCursor.ShardCursors[i].ChunkIndex, cursor.ShardCursors[i].ChunkIndex); + Assert.AreEqual(expectedCursor.ShardCursors[i].EventIndex, cursor.ShardCursors[i].EventIndex); + } Assert.AreEqual(shardIndex, cursor.ShardIndex); + + containerClient.Verify(r => r.GetBlobClient(manifestPath)); + + if (IsAsync) + { + blobClient.Verify(r => 
r.DownloadAsync()); + } + else + { + blobClient.Verify(r => r.Download()); + } + + for (int i = 0; i < shards.Count; i++) + { + shardFactory.Verify(r => r.BuildShard( + IsAsync, + containerClient.Object, + $"log/0{i}/2020/03/25/0200/", + shardCursors[i])); + } } /// @@ -81,94 +132,129 @@ public void GetCursor() /// We are round-robining the Shards, so we will return the events for /// the shards indexes: 0 1 2 0 1. /// - [Test] - public async Task GetPage() + //[Test] + //public async Task GetPage() + //{ + // // Arrange + // int eventCount = 5; + // int shardCount = 3; + + // List eventIds = new List(); + // for (int i = 0; i < eventCount; i++) + // { + // eventIds.Add(Guid.NewGuid()); + // } + + // List> mockShards = new List>(); + + // for (int i = 0; i (MockBehavior.Strict)); + // } + + // // Set up Shards + // mockShards[0].SetupSequence(r => r.Next(It.IsAny(), default)) + // .Returns(Task.FromResult(new BlobChangeFeedEvent + // { + // Id = eventIds[0] + // })) + // .Returns(Task.FromResult(new BlobChangeFeedEvent + // { + // Id = eventIds[3] + // })); + + // mockShards[0].SetupSequence(r => r.HasNext()) + // .Returns(true) + // .Returns(false); + + // mockShards[1].SetupSequence(r => r.Next(It.IsAny(), default)) + // .Returns(Task.FromResult(new BlobChangeFeedEvent + // { + // Id = eventIds[1] + // })) + // .Returns(Task.FromResult(new BlobChangeFeedEvent + // { + // Id = eventIds[4] + // })); + + // mockShards[1].SetupSequence(r => r.HasNext()) + // .Returns(true) + // .Returns(false); + + // mockShards[2].Setup(r => r.Next(It.IsAny(), default)) + // .Returns(Task.FromResult(new BlobChangeFeedEvent + // { + // Id = eventIds[2] + // })); + + // mockShards[2].Setup(r => r.HasNext()) + // .Returns(false); + + // List shards = new List(); + // for (int i = 0; i < shardCount; i++) + // { + // shards.Add(mockShards[i].Object); + // } + + // Segment segment = new Segment( + // isInitalized: true, + // shards: shards); + + // // Act + // List events = await 
segment.GetPage(IsAsync, 25); + + // // Assert + // Assert.AreEqual(eventCount, events.Count); + // for (int i = 0; i < eventCount; i++) + // { + // Assert.AreEqual(eventIds[i], events[i].Id); + // } + + // mockShards[0].Verify(r => r.Next(IsAsync, default)); + // mockShards[0].Verify(r => r.HasNext()); + // mockShards[1].Verify(r => r.Next(IsAsync, default)); + // mockShards[1].Verify(r => r.HasNext()); + // mockShards[2].Verify(r => r.Next(IsAsync, default)); + // mockShards[2].Verify(r => r.HasNext()); + // mockShards[0].Verify(r => r.Next(IsAsync, default)); + // mockShards[0].Verify(r => r.HasNext()); + // mockShards[1].Verify(r => r.Next(IsAsync, default)); + // mockShards[1].Verify(r => r.HasNext()); + //} + + private class ResponseImplementation : Response { - // Arrange - int eventCount = 5; - int shardCount = 3; + public override int Status => throw new NotImplementedException(); - List eventIds = new List(); - for (int i = 0; i < eventCount; i++) - { - eventIds.Add(Guid.NewGuid()); - } + public override string ReasonPhrase => throw new NotImplementedException(); - List> mockShards = new List>(); + public override Stream ContentStream { get => throw new NotImplementedException(); set => throw new NotImplementedException(); } + public override string ClientRequestId { get => throw new NotImplementedException(); set => throw new NotImplementedException(); } - for (int i = 0; i (MockBehavior.Strict)); + throw new NotImplementedException(); } - // Set up Shards - mockShards[0].SetupSequence(r => r.Next(It.IsAny(), default)) - .Returns(Task.FromResult(new BlobChangeFeedEvent - { - Id = eventIds[0] - })) - .Returns(Task.FromResult(new BlobChangeFeedEvent - { - Id = eventIds[3] - })); - - mockShards[0].SetupSequence(r => r.HasNext()) - .Returns(true) - .Returns(false); - - mockShards[1].SetupSequence(r => r.Next(It.IsAny(), default)) - .Returns(Task.FromResult(new BlobChangeFeedEvent - { - Id = eventIds[1] - })) - .Returns(Task.FromResult(new BlobChangeFeedEvent 
- { - Id = eventIds[4] - })); - - mockShards[1].SetupSequence(r => r.HasNext()) - .Returns(true) - .Returns(false); - - mockShards[2].Setup(r => r.Next(It.IsAny(), default)) - .Returns(Task.FromResult(new BlobChangeFeedEvent - { - Id = eventIds[2] - })); - - mockShards[2].Setup(r => r.HasNext()) - .Returns(false); - - List shards = new List(); - for (int i = 0; i < shardCount; i++) + protected override bool ContainsHeader(string name) { - shards.Add(mockShards[i].Object); + throw new NotImplementedException(); } - Segment segment = new Segment( - isInitalized: true, - shards: shards); - - // Act - List events = await segment.GetPage(IsAsync, 25); + protected override IEnumerable EnumerateHeaders() + { + throw new NotImplementedException(); + } - // Assert - Assert.AreEqual(eventCount, events.Count); - for (int i = 0; i < eventCount; i++) + protected override bool TryGetHeader(string name, out string value) { - Assert.AreEqual(eventIds[i], events[i].Id); + throw new NotImplementedException(); } - mockShards[0].Verify(r => r.Next(IsAsync, default)); - mockShards[0].Verify(r => r.HasNext()); - mockShards[1].Verify(r => r.Next(IsAsync, default)); - mockShards[1].Verify(r => r.HasNext()); - mockShards[2].Verify(r => r.Next(IsAsync, default)); - mockShards[2].Verify(r => r.HasNext()); - mockShards[0].Verify(r => r.Next(IsAsync, default)); - mockShards[0].Verify(r => r.HasNext()); - mockShards[1].Verify(r => r.Next(IsAsync, default)); - mockShards[1].Verify(r => r.HasNext()); + protected override bool TryGetHeaderValues(string name, out IEnumerable values) + { + throw new NotImplementedException(); + } } } } From 52be7ed56fcd3bac98e163e028db79c50f47c63f Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Wed, 6 May 2020 10:50:36 -0700 Subject: [PATCH 12/30] Finished adding Factories to Segment --- .../tests/Resources/SegmentManifest.json | 4 +- .../tests/SegmentTests.cs | 239 ++++++++++-------- 2 files changed, 141 insertions(+), 102 deletions(-) diff --git 
a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/SegmentManifest.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/SegmentManifest.json index 7c200036f7a06..21b93ea966a30 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/SegmentManifest.json +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/SegmentManifest.json @@ -14,9 +14,7 @@ "chunkFilePaths": [ "$blobchangefeed/log/00/2020/03/25/0200/", "$blobchangefeed/log/01/2020/03/25/0200/", - "$blobchangefeed/log/02/2020/03/25/0200/", - "$blobchangefeed/log/03/2020/03/25/0200/", - "$blobchangefeed/log/04/2020/03/25/0200/" + "$blobchangefeed/log/02/2020/03/25/0200/" ], "storageDiagnostics": { "version": 0, diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs index 7a05ed616f3db..41d7b654d4d72 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs @@ -31,7 +31,8 @@ public async Task GetCursor() Mock shardFactory = new Mock(MockBehavior.Strict); List> shards = new List>(); - for (int i = 0; i < 5; i++) + int shardCount = 3; + for (int i = 0; i < shardCount; i++) { shards.Add(new Mock(MockBehavior.Strict)); } @@ -40,13 +41,11 @@ public async Task GetCursor() { new ShardCursor(1, 2, 3), new ShardCursor(4, 5, 6), - new ShardCursor(7, 8, 9), - new ShardCursor(10, 11, 12), - new ShardCursor(13, 14, 15) + new ShardCursor(7, 8, 9) }; DateTimeOffset dateTime = new DateTimeOffset(2020, 3, 25, 2, 0, 0, TimeSpan.Zero); - int shardIndex = 4; + int shardIndex = 1; SegmentCursor expectedCursor = new SegmentCursor( dateTime, @@ -75,11 +74,9 @@ public async Task GetCursor() It.IsAny())) .ReturnsAsync(shards[0].Object) .ReturnsAsync(shards[1].Object) - .ReturnsAsync(shards[2].Object) - .ReturnsAsync(shards[3].Object) - .ReturnsAsync(shards[4].Object); + .ReturnsAsync(shards[2].Object); - for (int 
i = 0; i < shards.Count; i++) + for (int i = 0; i < shardCount; i++) { shards[i].Setup(r => r.GetCursor()).Returns(shardCursors[i]); } @@ -97,7 +94,7 @@ public async Task GetCursor() // Assert Assert.AreEqual(expectedCursor.SegmentTime, cursor.SegmentTime); Assert.AreEqual(expectedCursor.ShardCursors.Count, cursor.ShardCursors.Count); - for (int i = 0; i < expectedCursor.ShardCursors.Count; i++) + for (int i = 0; i < shardCount; i++) { Assert.AreEqual(expectedCursor.ShardCursors[i].BlockOffset, cursor.ShardCursors[i].BlockOffset); Assert.AreEqual(expectedCursor.ShardCursors[i].ChunkIndex, cursor.ShardCursors[i].ChunkIndex); @@ -132,95 +129,139 @@ public async Task GetCursor() /// We are round-robining the Shards, so we will return the events for /// the shards indexes: 0 1 2 0 1. /// - //[Test] - //public async Task GetPage() - //{ - // // Arrange - // int eventCount = 5; - // int shardCount = 3; - - // List eventIds = new List(); - // for (int i = 0; i < eventCount; i++) - // { - // eventIds.Add(Guid.NewGuid()); - // } - - // List> mockShards = new List>(); - - // for (int i = 0; i (MockBehavior.Strict)); - // } - - // // Set up Shards - // mockShards[0].SetupSequence(r => r.Next(It.IsAny(), default)) - // .Returns(Task.FromResult(new BlobChangeFeedEvent - // { - // Id = eventIds[0] - // })) - // .Returns(Task.FromResult(new BlobChangeFeedEvent - // { - // Id = eventIds[3] - // })); - - // mockShards[0].SetupSequence(r => r.HasNext()) - // .Returns(true) - // .Returns(false); - - // mockShards[1].SetupSequence(r => r.Next(It.IsAny(), default)) - // .Returns(Task.FromResult(new BlobChangeFeedEvent - // { - // Id = eventIds[1] - // })) - // .Returns(Task.FromResult(new BlobChangeFeedEvent - // { - // Id = eventIds[4] - // })); - - // mockShards[1].SetupSequence(r => r.HasNext()) - // .Returns(true) - // .Returns(false); - - // mockShards[2].Setup(r => r.Next(It.IsAny(), default)) - // .Returns(Task.FromResult(new BlobChangeFeedEvent - // { - // Id = eventIds[2] - // 
})); - - // mockShards[2].Setup(r => r.HasNext()) - // .Returns(false); - - // List shards = new List(); - // for (int i = 0; i < shardCount; i++) - // { - // shards.Add(mockShards[i].Object); - // } - - // Segment segment = new Segment( - // isInitalized: true, - // shards: shards); - - // // Act - // List events = await segment.GetPage(IsAsync, 25); - - // // Assert - // Assert.AreEqual(eventCount, events.Count); - // for (int i = 0; i < eventCount; i++) - // { - // Assert.AreEqual(eventIds[i], events[i].Id); - // } - - // mockShards[0].Verify(r => r.Next(IsAsync, default)); - // mockShards[0].Verify(r => r.HasNext()); - // mockShards[1].Verify(r => r.Next(IsAsync, default)); - // mockShards[1].Verify(r => r.HasNext()); - // mockShards[2].Verify(r => r.Next(IsAsync, default)); - // mockShards[2].Verify(r => r.HasNext()); - // mockShards[0].Verify(r => r.Next(IsAsync, default)); - // mockShards[0].Verify(r => r.HasNext()); - // mockShards[1].Verify(r => r.Next(IsAsync, default)); - // mockShards[1].Verify(r => r.HasNext()); - //} + [Test] + public async Task GetPage() + { + // Arrange + string manifestPath = "idx/segments/2020/03/25/0200/meta.json"; + int shardCount = 3; + int eventCount = 5; + + Mock containerClient = new Mock(MockBehavior.Strict); + Mock blobClient = new Mock(MockBehavior.Strict); + Mock shardFactory = new Mock(MockBehavior.Strict); + + List> shards = new List>(); + + for (int i = 0; i < shardCount; i++) + { + shards.Add(new Mock(MockBehavior.Strict)); + } + + List eventIds = new List(); + for (int i = 0; i < eventCount; i++) + { + eventIds.Add(Guid.NewGuid()); + } + + containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); + + using FileStream stream = File.OpenRead($"Resources{Path.DirectorySeparatorChar}{"SegmentManifest.json"}"); + BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); + Response downloadResponse = Response.FromValue(blobDownloadInfo, new 
ResponseImplementation()); + + if (IsAsync) + { + blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse); + } + else + { + blobClient.Setup(r => r.Download()).Returns(downloadResponse); + } + + shardFactory.SetupSequence(r => r.BuildShard( + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny())) + .ReturnsAsync(shards[0].Object) + .ReturnsAsync(shards[1].Object) + .ReturnsAsync(shards[2].Object); + + // Set up Shards + shards[0].SetupSequence(r => r.Next(It.IsAny(), default)) + .Returns(Task.FromResult(new BlobChangeFeedEvent + { + Id = eventIds[0] + })) + .Returns(Task.FromResult(new BlobChangeFeedEvent + { + Id = eventIds[3] + })); + + shards[0].SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(false); + + shards[1].SetupSequence(r => r.Next(It.IsAny(), default)) + .Returns(Task.FromResult(new BlobChangeFeedEvent + { + Id = eventIds[1] + })) + .Returns(Task.FromResult(new BlobChangeFeedEvent + { + Id = eventIds[4] + })); + + shards[1].SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(false); + + shards[2].Setup(r => r.Next(It.IsAny(), default)) + .Returns(Task.FromResult(new BlobChangeFeedEvent + { + Id = eventIds[2] + })); + + shards[2].Setup(r => r.HasNext()) + .Returns(false); + + SegmentFactory segmentFactory = new SegmentFactory(shardFactory.Object); + Segment segment = await segmentFactory.BuildSegment( + IsAsync, + containerClient.Object, + manifestPath); + + // Act + List events = await segment.GetPage(IsAsync, 25); + + // Assert + Assert.AreEqual(eventCount, events.Count); + for (int i = 0; i < eventCount; i++) + { + Assert.AreEqual(eventIds[i], events[i].Id); + } + + containerClient.Verify(r => r.GetBlobClient(manifestPath)); + if (IsAsync) + { + blobClient.Verify(r => r.DownloadAsync()); + } + else + { + blobClient.Verify(r => r.Download()); + } + + for (int i = 0; i < shards.Count; i++) + { + shardFactory.Verify(r => r.BuildShard( + IsAsync, + containerClient.Object, + $"log/0{i}/2020/03/25/0200/", + default)); + } 
+ + shards[0].Verify(r => r.Next(IsAsync, default)); + shards[0].Verify(r => r.HasNext()); + shards[1].Verify(r => r.Next(IsAsync, default)); + shards[1].Verify(r => r.HasNext()); + shards[2].Verify(r => r.Next(IsAsync, default)); + shards[2].Verify(r => r.HasNext()); + shards[0].Verify(r => r.Next(IsAsync, default)); + shards[0].Verify(r => r.HasNext()); + shards[1].Verify(r => r.Next(IsAsync, default)); + shards[1].Verify(r => r.HasNext()); + } private class ResponseImplementation : Response { From a1ec00f28b70aaf4bd9607f7049bf3350452676e Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Wed, 6 May 2020 12:50:49 -0700 Subject: [PATCH 13/30] Half way through adding Factories to Change Feed --- .../src/BlobChangeFeedAsyncPagable.cs | 32 ++- .../src/BlobChangeFeedExtensions.cs | 61 ++++- .../src/BlobChangeFeedPagable.cs | 37 ++- .../src/ChangeFeed.cs | 221 ++---------------- .../src/ChangeFeedFactory.cs | 192 +++++++++++++++ .../tests/BlobChangeFeedAsyncPagableTests.cs | 2 +- .../tests/BlobChangeFeedExtensionsTests.cs | 95 +++++++- .../tests/ChangeFeedFactoryTests.cs | 79 +++++++ .../tests/ChangeFeedTestBase.cs | 2 +- .../tests/ChangeFeedTests.cs | 140 ----------- 10 files changed, 502 insertions(+), 359 deletions(-) create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs index 263a3a93b35bf..03091d9c2ad8d 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs @@ -20,6 +20,12 @@ namespace Azure.Storage.Blobs.ChangeFeed /// public class BlobChangeFeedAsyncPagable : AsyncPageable { + private readonly ChangeFeedFactory _changeFeedFactory; + private readonly 
BlobServiceClient _blobServiceClient; + private readonly DateTimeOffset? _startTime; + private readonly DateTimeOffset? _endTime; + private readonly string _continuation; + private ChangeFeed _changeFeed; /// @@ -30,19 +36,19 @@ internal BlobChangeFeedAsyncPagable( DateTimeOffset? startTime = default, DateTimeOffset? endTime = default) { - _changeFeed = new ChangeFeed( - blobBerviceClient, - startTime, - endTime); + _changeFeedFactory = new ChangeFeedFactory(); + _blobServiceClient = blobBerviceClient; + _startTime = startTime; + _endTime = endTime; } internal BlobChangeFeedAsyncPagable( BlobServiceClient blobServiceClient, string continuation) { - _changeFeed = new ChangeFeed( - blobServiceClient, - continuation); + _changeFeedFactory = new ChangeFeedFactory(); + _blobServiceClient = blobServiceClient; + _continuation = continuation; } /// @@ -55,6 +61,16 @@ public override async IAsyncEnumerable> AsPages( string continuationToken = null, int? pageSizeHint = null) { + if (_changeFeed == null) + { + _changeFeed = await _changeFeedFactory.BuildChangeFeed( + async: true, + _blobServiceClient, + _startTime, + _endTime, + _continuation) + .ConfigureAwait(false); + } while (_changeFeed.HasNext()) { yield return await _changeFeed.GetPage( @@ -63,4 +79,4 @@ public override async IAsyncEnumerable> AsPages( } } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs index cd711c350c154..eeeb545b26545 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs @@ -2,8 +2,11 @@ // Licensed under the MIT License. 
using System; +using System.Collections.Generic; using System.Globalization; +using System.Threading.Tasks; using Azure.Storage.Blobs; +using Azure.Storage.Blobs.Models; namespace Azure.Storage.Blobs.ChangeFeed { @@ -100,5 +103,61 @@ public static BlobChangeFeedClient GetChangeFeedClient(this BlobServiceClient se second: 0, offset: TimeSpan.Zero); } + + internal static async Task> GetSegmentsInYear( + bool async, + BlobContainerClient containerClient, + string yearPath, + DateTimeOffset? startTime = default, + DateTimeOffset? endTime = default) + { + List list = new List(); + + if (async) + { + await foreach (BlobHierarchyItem blobHierarchyItem in containerClient.GetBlobsByHierarchyAsync( + prefix: yearPath) + .ConfigureAwait(false)) + { + if (blobHierarchyItem.IsPrefix) + continue; + + DateTimeOffset segmentDateTime = blobHierarchyItem.Blob.Name.ToDateTimeOffset().Value; + if (startTime.HasValue && segmentDateTime < startTime + || endTime.HasValue && segmentDateTime > endTime) + continue; + + list.Add(blobHierarchyItem.Blob.Name); + } + } + else + { + foreach (BlobHierarchyItem blobHierarchyItem in containerClient.GetBlobsByHierarchy( + prefix: yearPath)) + { + if (blobHierarchyItem.IsPrefix) + continue; + + DateTimeOffset segmentDateTime = blobHierarchyItem.Blob.Name.ToDateTimeOffset().Value; + if (startTime.HasValue && segmentDateTime < startTime + || endTime.HasValue && segmentDateTime > endTime) + continue; + + list.Add(blobHierarchyItem.Blob.Name); + } + } + + return new Queue(list); + } + + internal static DateTimeOffset MinDateTime(DateTimeOffset lastConsumable, DateTimeOffset? 
endDate) + { + if (endDate.HasValue && endDate.Value < lastConsumable) + { + return endDate.Value; + } + + return lastConsumable; + } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs index 8201d5f1128b3..bdf0291799db8 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs @@ -16,26 +16,32 @@ namespace Azure.Storage.Blobs.ChangeFeed /// public class BlobChangeFeedPagable : Pageable { + private readonly ChangeFeedFactory _changeFeedFactory; + private readonly BlobServiceClient _blobServiceClient; + private readonly DateTimeOffset? _startTime; + private readonly DateTimeOffset? _endTime; + private readonly string _continuation; + private ChangeFeed _changeFeed; internal BlobChangeFeedPagable( - BlobServiceClient serviceClient, + BlobServiceClient blobBerviceClient, DateTimeOffset? startTime = default, DateTimeOffset? endTime = default) { - _changeFeed = new ChangeFeed( - serviceClient, - startTime, - endTime); + _changeFeedFactory = new ChangeFeedFactory(); + _blobServiceClient = blobBerviceClient; + _startTime = startTime; + _endTime = endTime; } internal BlobChangeFeedPagable( - BlobServiceClient serviceClient, + BlobServiceClient blobBerviceClient, string continuation) { - _changeFeed = new ChangeFeed( - serviceClient, - continuation); + _changeFeedFactory = new ChangeFeedFactory(); + _blobServiceClient = blobBerviceClient; + _continuation = continuation; } /// @@ -46,6 +52,17 @@ internal BlobChangeFeedPagable( /// public override IEnumerable> AsPages(string continuationToken = null, int? 
pageSizeHint = null) { + if (_changeFeed == null) + { + _changeFeed = _changeFeedFactory.BuildChangeFeed( + async: false, + _blobServiceClient, + _startTime, + _endTime, + _continuation) + .EnsureCompleted(); + } + while (_changeFeed.HasNext()) { yield return _changeFeed.GetPage( @@ -54,4 +71,4 @@ public override IEnumerable> AsPages(string continuati } } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs index b638cf5b0ab69..d1531433da75e 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs @@ -3,12 +3,8 @@ using System; using System.Collections.Generic; -using System.Globalization; -using System.Text; using System.Text.Json; using System.Threading.Tasks; -using Azure.Core.Pipeline; -using Azure.Storage.Blobs; using Azure.Storage.Blobs.Models; using Azure.Storage.Blobs.ChangeFeed.Models; using System.Threading; @@ -22,10 +18,15 @@ internal class ChangeFeed /// private readonly BlobContainerClient _containerClient; + /// + /// A for creating new s. + /// + private readonly SegmentFactory _segmentFactory; + /// /// Queue of paths to years we haven't processed yet. /// - private Queue _years; + private readonly Queue _years; /// /// Paths to segments in the current year we haven't processed yet. @@ -37,12 +38,9 @@ internal class ChangeFeed /// private Segment _currentSegment; - private readonly SegmentCursor _currentSegmentCursor; - /// /// The latest time the Change Feed can safely be read from. /// - //TODO this can advance while we are iterating through the Change Feed. Figure out how to support this. private DateTimeOffset _lastConsumable; /// @@ -57,160 +55,47 @@ internal class ChangeFeed /// private DateTimeOffset? _endTime; - /// - /// If this ChangeFeed has been initalized. 
- /// - private bool _isInitalized; - - private readonly SegmentFactory _segmentFactory; - - // Start time will be rounded down to the nearest hour. public ChangeFeed( - BlobServiceClient blobServiceClient, - DateTimeOffset? startTime = default, - DateTimeOffset? endTime = default) - { - _containerClient = blobServiceClient.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName); - _years = new Queue(); - _segments = new Queue(); - _isInitalized = false; - _startTime = startTime.RoundDownToNearestHour(); - _endTime = endTime.RoundUpToNearestHour(); - _segmentFactory = new SegmentFactory(new ShardFactory(new ChunkFactory(new LazyLoadingBlobStreamFactory(), new AvroReaderFactory()))); - } - - public ChangeFeed( - BlobServiceClient blobServiceClient, - string continutation) + BlobContainerClient containerClient, + SegmentFactory segmentFactory, + Queue years, + Queue segments, + Segment currentSegment, + DateTimeOffset lastConsumable, + DateTimeOffset? startTime, + DateTimeOffset? endTime) { - _containerClient = blobServiceClient.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName); - ChangeFeedCursor cursor = JsonSerializer.Deserialize(continutation); - ValidateCursor(_containerClient, cursor); - _years = new Queue(); - _segments = new Queue(); - _isInitalized = false; - _startTime = cursor.CurrentSegmentCursor.SegmentTime; - _endTime = cursor.EndTime; - _currentSegmentCursor = cursor.CurrentSegmentCursor; - _segmentFactory = new SegmentFactory(new ShardFactory(new ChunkFactory(new LazyLoadingBlobStreamFactory(), new AvroReaderFactory()))); + _containerClient = containerClient; + _segmentFactory = segmentFactory; + _years = years; + _segments = segments; + _currentSegment = currentSegment; + _lastConsumable = lastConsumable; + _startTime = startTime; + _endTime = endTime; } /// - /// Internal constructor for unit tests. + /// Constructor for mocking, and for creating a Change Feed with no Events. 
/// - /// - internal ChangeFeed( - BlobContainerClient containerClient) - { - _containerClient = containerClient; - } - - private async Task Initalize(bool async) - { - // Check if Change Feed has been abled for this account. - bool changeFeedContainerExists; - - if (async) - { - changeFeedContainerExists = await _containerClient.ExistsAsync().ConfigureAwait(false); - } - else - { - changeFeedContainerExists = _containerClient.Exists(); - } + public ChangeFeed() { } - if (!changeFeedContainerExists) - { - //TODO improve this error message - throw new ArgumentException("Change Feed hasn't been enabled on this account, or is current being enabled."); - } - - // Get last consumable - BlobClient blobClient = _containerClient.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath); - BlobDownloadInfo blobDownloadInfo; - if (async) - { - blobDownloadInfo = await blobClient.DownloadAsync().ConfigureAwait(false); - } - else - { - blobDownloadInfo = blobClient.Download(); - } - - JsonDocument jsonMetaSegment; - if (async) - { - jsonMetaSegment = await JsonDocument.ParseAsync(blobDownloadInfo.Content).ConfigureAwait(false); - } - else - { - jsonMetaSegment = JsonDocument.Parse(blobDownloadInfo.Content); - } - - //TODO what happens when _lastConsumable advances an hour? 
- _lastConsumable = jsonMetaSegment.RootElement.GetProperty("lastConsumable").GetDateTimeOffset(); - - // Get year paths - _years = await GetYearPaths(async).ConfigureAwait(false); - - // Dequeue any years that occur before start time - if (_startTime.HasValue) - { - while (_years.Count > 0 - && _years.Peek().ToDateTimeOffset() < _startTime.RoundDownToNearestYear()) - { - _years.Dequeue(); - } - } - - if (_years.Count == 0) - { - return; - } - - string firstYearPath = _years.Dequeue(); - - // Get Segments for first year - _segments = await GetSegmentsInYear( - async: async, - yearPath: firstYearPath, - startTime: _startTime, - endTime: MinDateTime(_lastConsumable, _endTime)) - .ConfigureAwait(false); - - _currentSegment = await _segmentFactory.BuildSegment( - async, - _containerClient, - _segments.Dequeue(), - _currentSegmentCursor) - .ConfigureAwait(false); - _isInitalized = true; - } - - //TODO current round robin strategy doesn't work for live streaming! // The last segment may still be adding chunks. public async Task> GetPage( bool async, int pageSize = 512, CancellationToken cancellationToken = default) { - if (!_isInitalized) - { - await Initalize(async).ConfigureAwait(false); - } - if (!HasNext()) { throw new InvalidOperationException("Change feed doesn't have any more events"); } - //TODO what should we return here? Also do we really need to check this on every page? if (_currentSegment.DateTime > _endTime) { return new BlobChangeFeedEventPage(); } - //TODO what should we return here? Also do we really need to check this on every page? if (_currentSegment.DateTime > _lastConsumable) { return new BlobChangeFeedEventPage(); @@ -240,18 +125,13 @@ public async Task> GetPage( public bool HasNext() { - if (!_isInitalized) - { - return true; - } - // We have no more segments, years, and the current segment doesn't have hext. 
if (_segments.Count == 0 && _years.Count == 0 && !_currentSegment.HasNext()) { return false; } - DateTimeOffset end = MinDateTime(_lastConsumable, _endTime); + DateTimeOffset end = BlobChangeFeedExtensions.MinDateTime(_lastConsumable, _endTime); return _currentSegment.DateTime <= end; } @@ -347,56 +227,5 @@ private async Task AdvanceSegmentIfNecessary(bool async) } } } - - internal async Task> GetYearPaths(bool async) - { - List list = new List(); - - if (async) - { - await foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchyAsync( - prefix: Constants.ChangeFeed.SegmentPrefix, - delimiter: "/").ConfigureAwait(false)) - { - if (blobHierarchyItem.Prefix.Contains(Constants.ChangeFeed.InitalizationSegment)) - continue; - - list.Add(blobHierarchyItem.Prefix); - } - } - else - { - foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchy( - prefix: Constants.ChangeFeed.SegmentPrefix, - delimiter: "/")) - { - if (blobHierarchyItem.Prefix.Contains(Constants.ChangeFeed.InitalizationSegment)) - continue; - - list.Add(blobHierarchyItem.Prefix); - } - } - return new Queue(list); - } - - private static DateTimeOffset MinDateTime(DateTimeOffset lastConsumable, DateTimeOffset? endDate) - { - if (endDate.HasValue && endDate.Value < lastConsumable) - { - return endDate.Value; - } - - return lastConsumable; - } - - private static void ValidateCursor( - BlobContainerClient containerClient, - ChangeFeedCursor cursor) - { - if (containerClient.Uri.ToString().GetHashCode() != cursor.UrlHash) - { - throw new ArgumentException("Cursor URL does not match container URL"); - } - } } } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs new file mode 100644 index 0000000000000..6ec3584abbe71 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs @@ -0,0 +1,192 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Storage.Blobs.ChangeFeed.Models; +using Azure.Storage.Blobs.Models; + +namespace Azure.Storage.Blobs.ChangeFeed +{ + internal class ChangeFeedFactory + { + private readonly SegmentFactory _segmentFactory; + + public ChangeFeedFactory() + { + _segmentFactory = new SegmentFactory( + new ShardFactory( + new ChunkFactory( + new LazyLoadingBlobStreamFactory(), + new AvroReaderFactory()))); + } + + public ChangeFeedFactory(SegmentFactory segmentFactory) + { + _segmentFactory = segmentFactory; + } + + public async Task BuildChangeFeed( + bool async, + BlobServiceClient blobServiceClient, + DateTimeOffset? startTime = default, + DateTimeOffset? endTime = default, + string continuation = default) + { + BlobContainerClient containerClient = blobServiceClient.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName); + DateTimeOffset lastConsumable; + Queue years; + Queue segments; + ChangeFeedCursor cursor = null; + + // Create cursor + if (continuation != null) + { + cursor = JsonSerializer.Deserialize(continuation); + ValidateCursor(containerClient, cursor); + startTime = cursor.CurrentSegmentCursor.SegmentTime; + endTime = cursor.EndTime; + } + // Round start and end time if we are not using the cursor. + else + { + startTime = startTime.RoundDownToNearestHour(); + endTime = endTime.RoundUpToNearestHour(); + } + + // Check if Change Feed has been abled for this account. 
+ bool changeFeedContainerExists; + + if (async) + { + changeFeedContainerExists = await containerClient.ExistsAsync().ConfigureAwait(false); + } + else + { + changeFeedContainerExists = containerClient.Exists(); + } + + if (!changeFeedContainerExists) + { + //TODO improve this error message + throw new ArgumentException("Change Feed hasn't been enabled on this account, or is current being enabled."); + } + + // Get last consumable + BlobClient blobClient = containerClient.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath); + BlobDownloadInfo blobDownloadInfo; + if (async) + { + blobDownloadInfo = await blobClient.DownloadAsync().ConfigureAwait(false); + } + else + { + blobDownloadInfo = blobClient.Download(); + } + + JsonDocument jsonMetaSegment; + if (async) + { + jsonMetaSegment = await JsonDocument.ParseAsync(blobDownloadInfo.Content).ConfigureAwait(false); + } + else + { + jsonMetaSegment = JsonDocument.Parse(blobDownloadInfo.Content); + } + + lastConsumable = jsonMetaSegment.RootElement.GetProperty("lastConsumable").GetDateTimeOffset(); + + // Get year paths + years = await GetYearPaths(async, containerClient).ConfigureAwait(false); + + // Dequeue any years that occur before start time + if (startTime.HasValue) + { + while (years.Count > 0 + && years.Peek().ToDateTimeOffset() < startTime.RoundDownToNearestYear()) + { + years.Dequeue(); + } + } + + if (years.Count == 0) + { + return new ChangeFeed(); + } + + string firstYearPath = years.Dequeue(); + + // Get Segments for first year + segments = await BlobChangeFeedExtensions.GetSegmentsInYear( + async: async, + containerClient: containerClient, + yearPath: firstYearPath, + startTime: startTime, + endTime: BlobChangeFeedExtensions.MinDateTime(lastConsumable, endTime)) + .ConfigureAwait(false); + + Segment currentSegment = await _segmentFactory.BuildSegment( + async, + containerClient, + segments.Dequeue(), + cursor?.CurrentSegmentCursor) + .ConfigureAwait(false); + + return new ChangeFeed( + containerClient, 
+ _segmentFactory, + years, + segments, + currentSegment, + lastConsumable, + startTime, + endTime); + } + + private static void ValidateCursor( + BlobContainerClient containerClient, + ChangeFeedCursor cursor) + { + if (containerClient.Uri.ToString().GetHashCode() != cursor.UrlHash) + { + throw new ArgumentException("Cursor URL does not match container URL"); + } + } + + internal static async Task> GetYearPaths( + bool async, + BlobContainerClient containerClient) + { + List list = new List(); + + if (async) + { + await foreach (BlobHierarchyItem blobHierarchyItem in containerClient.GetBlobsByHierarchyAsync( + prefix: Constants.ChangeFeed.SegmentPrefix, + delimiter: "/").ConfigureAwait(false)) + { + if (blobHierarchyItem.Prefix.Contains(Constants.ChangeFeed.InitalizationSegment)) + continue; + + list.Add(blobHierarchyItem.Prefix); + } + } + else + { + foreach (BlobHierarchyItem blobHierarchyItem in containerClient.GetBlobsByHierarchy( + prefix: Constants.ChangeFeed.SegmentPrefix, + delimiter: "/")) + { + if (blobHierarchyItem.Prefix.Contains(Constants.ChangeFeed.InitalizationSegment)) + continue; + + list.Add(blobHierarchyItem.Prefix); + } + } + return new Queue(list); + } + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs index 17ffee3b7a3cb..2da82aa96d502 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs @@ -61,7 +61,7 @@ IAsyncEnumerator> asyncEnumerator } [Test] - [Ignore("")] + //[Ignore("")] public async Task CursorTest() { BlobServiceClient service = GetServiceClient_SharedKey(); diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs index 
22660f9829562..cbfd2aba27ca7 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs @@ -4,12 +4,21 @@ using System; using System.Collections.Generic; using System.Text; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Storage.Blobs.Models; +using Moq; using NUnit.Framework; namespace Azure.Storage.Blobs.ChangeFeed.Tests { - public class BlobChangeFeedExtensionsTests + public class BlobChangeFeedExtensionsTests : ChangeFeedTestBase { + public BlobChangeFeedExtensionsTests(bool async) + : base(async, null /* RecordedTestMode.Record /* to re-record */) + { + } + [Test] public void ToDateTimeOffsetTests() { @@ -95,5 +104,87 @@ public void RoundDownToNearestYearTests() null, ((DateTimeOffset?)null).RoundDownToNearestYear()); } + + + [Test] + public async Task GetSegmentsInYearTest() + { + // Arrange + Mock containerClient = new Mock(); + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYearFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + "idx/segments/2020/", + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = + PageResponseEnumerator.CreateEnumerable(GetSegmentsInYearFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + default, + "idx/segments/2020/", + default)).Returns(pageable); + } + + // Act + Queue segmentPaths = await BlobChangeFeedExtensions.GetSegmentsInYear( + IsAsync, + containerClient.Object, + "idx/segments/2020/", + startTime: new DateTimeOffset(2020, 3, 3, 0, 0, 0, TimeSpan.Zero), + endTime: new DateTimeOffset(2020, 3, 3, 22, 0, 0, TimeSpan.Zero)); + + // Assert + Queue expectedSegmentPaths = new Queue(); + expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/0000/meta.json"); + 
expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/1800/meta.json"); + expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/2000/meta.json"); + expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/2200/meta.json"); + + Assert.AreEqual(expectedSegmentPaths, segmentPaths); + } + + private static Task> GetSegmentsInYearFuncAsync( + string continuation, + int? pageSizeHint) + => Task.FromResult(GetSegmentsInYearFunc(continuation, pageSizeHint)); + + private static Page GetSegmentsInYearFunc( + string continuation, + int? pageSizeHint) + => new BlobHierarchyItemPage(new List + { + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/01/16/2300/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/02/2300/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/0000/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/1800/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2000/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2200/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/05/1700/meta.json", false, null)), + }); } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs new file mode 100644 index 0000000000000..ae4cf444f60cb --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Storage.Blobs.Models; +using Moq; +using NUnit.Framework; + +namespace Azure.Storage.Blobs.ChangeFeed.Tests +{ + public class ChangeFeedFactoryTests : ChangeFeedTestBase + { + public ChangeFeedFactoryTests(bool async) + : base(async, null /* RecordedTestMode.Record /* to re-record */) + { + } + + [Test] + public async Task GetYearPathsTest() + { + // Arrange + Mock containerClient = new Mock(MockBehavior.Strict); + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = + PageResponseEnumerator.CreateEnumerable(GetYearPathFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(pageable); + } + + // Act + Queue years = await ChangeFeedFactory.GetYearPaths(IsAsync, containerClient.Object).ConfigureAwait(false); + + // Assert + Queue expectedYears = new Queue(); + expectedYears.Enqueue("idx/segments/2019/"); + expectedYears.Enqueue("idx/segments/2020/"); + expectedYears.Enqueue("idx/segments/2022/"); + expectedYears.Enqueue("idx/segments/2023/"); + Assert.AreEqual(expectedYears, years); + } + + private static Task> GetYearsPathFuncAsync(string continuation, int? pageSizeHint) + => Task.FromResult(GetYearPathFunc(continuation, pageSizeHint)); + + private static Page GetYearPathFunc( + string continuation, + int? 
pageSizeHint) + => new BlobHierarchyItemPage(new List + { + BlobsModelFactory.BlobHierarchyItem("idx/segments/1601/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2019/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2020/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2022/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2023/", null), + }); + } +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs index 115ac85c5f2a9..70ff6d005ef74 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs @@ -18,7 +18,7 @@ public class ChangeFeedTestBase : StorageTestBase public ChangeFeedTestBase(bool async) : this(async, null) { } public ChangeFeedTestBase(bool async, RecordedTestMode? mode = null) - : base(async, mode) + : base(async, RecordedTestMode.Live) { } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs index c7b18c451bb2f..39b26a66611c0 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs @@ -19,145 +19,5 @@ public ChangeFeedTests(bool async) : base(async, null /* RecordedTestMode.Record /* to re-record */) { } - - [Test] - public async Task GetYearPathsTest() - { - // Arrange - Mock containerClient = new Mock(); - ChangeFeed changeFeed = new ChangeFeed(containerClient.Object); - - if (IsAsync) - { - AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathFuncAsync); - - containerClient.Setup(r => r.GetBlobsByHierarchyAsync( - default, - default, - "/", - Constants.ChangeFeed.SegmentPrefix, - default)).Returns(asyncPageable); - } - else - { - Pageable pageable = - 
PageResponseEnumerator.CreateEnumerable(GetYearPathFunc); - - containerClient.Setup(r => r.GetBlobsByHierarchy( - default, - default, - "/", - Constants.ChangeFeed.SegmentPrefix, - default)).Returns(pageable); - } - - // Act - Queue years = await changeFeed.GetYearPaths(IsAsync).ConfigureAwait(false); - - // Assert - Queue expectedYears = new Queue(); - expectedYears.Enqueue("idx/segments/2019/"); - expectedYears.Enqueue("idx/segments/2020/"); - expectedYears.Enqueue("idx/segments/2022/"); - expectedYears.Enqueue("idx/segments/2023/"); - Assert.AreEqual(expectedYears, years); - - } - - private static Task> GetYearsPathFuncAsync(string continuation, int? pageSizeHint) - => Task.FromResult(GetYearPathFunc(continuation, pageSizeHint)); - - private static Page GetYearPathFunc( - string continuation, - int? pageSizeHint) - => new BlobHierarchyItemPage(new List - { - BlobsModelFactory.BlobHierarchyItem("idx/segments/1601/", null), - BlobsModelFactory.BlobHierarchyItem("idx/segments/2019/", null), - BlobsModelFactory.BlobHierarchyItem("idx/segments/2020/", null), - BlobsModelFactory.BlobHierarchyItem("idx/segments/2022/", null), - BlobsModelFactory.BlobHierarchyItem("idx/segments/2023/", null), - }); - - [Test] - public async Task GetSegmentsInYearTest() - { - // Arrange - Mock containerClient = new Mock(); - ChangeFeed changeFeed = new ChangeFeed(containerClient.Object); - - if (IsAsync) - { - AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYearFuncAsync); - - containerClient.Setup(r => r.GetBlobsByHierarchyAsync( - default, - default, - default, - "idx/segments/2020/", - default)).Returns(asyncPageable); - } - else - { - Pageable pageable = - PageResponseEnumerator.CreateEnumerable(GetSegmentsInYearFunc); - - containerClient.Setup(r => r.GetBlobsByHierarchy( - default, - default, - default, - "idx/segments/2020/", - default)).Returns(pageable); - } - - // Act - Queue segmentPaths = await changeFeed.GetSegmentsInYear( - IsAsync, - 
"idx/segments/2020/", - startTime: new DateTimeOffset(2020, 3, 3, 0, 0, 0, TimeSpan.Zero), - endTime: new DateTimeOffset(2020, 3, 3, 22, 0 , 0, TimeSpan.Zero)); - - // Assert - Queue expectedSegmentPaths = new Queue(); - expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/0000/meta.json"); - expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/1800/meta.json"); - expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/2000/meta.json"); - expectedSegmentPaths.Enqueue("idx/segments/2020/03/03/2200/meta.json"); - - Assert.AreEqual(expectedSegmentPaths, segmentPaths); - } - - private static Task> GetSegmentsInYearFuncAsync( - string continuation, - int? pageSizeHint) - => Task.FromResult(GetSegmentsInYearFunc(continuation, pageSizeHint)); - - private static Page GetSegmentsInYearFunc( - string continuation, - int? pageSizeHint) - => new BlobHierarchyItemPage(new List - { - BlobsModelFactory.BlobHierarchyItem( - null, - BlobsModelFactory.BlobItem("idx/segments/2020/01/16/2300/meta.json", false, null)), - BlobsModelFactory.BlobHierarchyItem( - null, - BlobsModelFactory.BlobItem("idx/segments/2020/03/02/2300/meta.json", false, null)), - BlobsModelFactory.BlobHierarchyItem( - null, - BlobsModelFactory.BlobItem("idx/segments/2020/03/03/0000/meta.json", false, null)), - BlobsModelFactory.BlobHierarchyItem( - null, - BlobsModelFactory.BlobItem("idx/segments/2020/03/03/1800/meta.json", false, null)), - BlobsModelFactory.BlobHierarchyItem( - null, - BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2000/meta.json", false, null)), - BlobsModelFactory.BlobHierarchyItem( - null, - BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2200/meta.json", false, null)), - BlobsModelFactory.BlobHierarchyItem( - null, - BlobsModelFactory.BlobItem("idx/segments/2020/03/05/1700/meta.json", false, null)), - }); } } From 265c1e80d0e0cf4bbed2eb1c168059d09f83598b Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Thu, 7 May 2020 10:05:49 -0700 Subject: [PATCH 14/30] Finshed adding Factories 
to Change Feed --- .../src/ChangeFeed.cs | 13 +- .../src/Segment.cs | 8 +- .../tests/BlobChangeFeedExtensionsTests.cs | 33 - .../tests/ChangeFeedFactoryTests.cs | 15 - .../tests/ChangeFeedTestBase.cs | 48 ++ .../tests/ChangeFeedTests.cs | 624 ++++++++++++++++++ .../tests/Resources/ChangeFeedManifest.json | 12 + .../tests/SegmentTests.cs | 40 +- .../src/Shared/Constants.cs | 1 + 9 files changed, 699 insertions(+), 95 deletions(-) create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/ChangeFeedManifest.json diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs index d1531433da75e..877268f8505f2 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs @@ -83,7 +83,7 @@ public ChangeFeed() { } // The last segment may still be adding chunks. public async Task> GetPage( bool async, - int pageSize = 512, + int pageSize = Constants.ChangeFeed.DefaultPageSize, CancellationToken cancellationToken = default) { if (!HasNext()) @@ -121,8 +121,6 @@ public async Task> GetPage( return new BlobChangeFeedEventPage(blobChangeFeedEvents, JsonSerializer.Serialize(GetCursor())); } - - public bool HasNext() { // We have no more segments, years, and the current segment doesn't have hext. @@ -136,7 +134,6 @@ public bool HasNext() return _currentSegment.DateTime <= end; } - //TODO how do update this? public DateTimeOffset LastConsumable() { return _lastConsumable; @@ -195,8 +192,14 @@ internal async Task> GetSegmentsInYear( private async Task AdvanceSegmentIfNecessary(bool async) { + // If the current segment has more Events, we don't need to do anything. 
+ if (_currentSegment.HasNext()) + { + return; + } + // If the current segment is completed, remove it - if (!_currentSegment.HasNext() && _segments.Count > 0) + if (_segments.Count > 0) { _currentSegment = await _segmentFactory.BuildSegment( async, diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs index a923334ba0e20..3c099e722179f 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs @@ -52,7 +52,7 @@ public Segment( Finalized = finalized; } - public SegmentCursor GetCursor() + public virtual SegmentCursor GetCursor() { List shardCursors = new List(); foreach (Shard shard in _shards) @@ -65,7 +65,7 @@ public SegmentCursor GetCursor() shardIndex: _shardIndex); } - public async Task> GetPage( + public virtual async Task> GetPage( bool async, int? pageSize, CancellationToken cancellationToken = default) @@ -100,12 +100,10 @@ public async Task> GetPage( } } - //TODO how to get raw response for page? Does it matter? return changeFeedEventList; } - //TODO figure out if this is right. - public bool HasNext() + public virtual bool HasNext() => _shards.Count > 0; /// diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs index cbfd2aba27ca7..631633df5223d 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedExtensionsTests.cs @@ -153,38 +153,5 @@ public async Task GetSegmentsInYearTest() Assert.AreEqual(expectedSegmentPaths, segmentPaths); } - - private static Task> GetSegmentsInYearFuncAsync( - string continuation, - int? 
pageSizeHint) - => Task.FromResult(GetSegmentsInYearFunc(continuation, pageSizeHint)); - - private static Page GetSegmentsInYearFunc( - string continuation, - int? pageSizeHint) - => new BlobHierarchyItemPage(new List - { - BlobsModelFactory.BlobHierarchyItem( - null, - BlobsModelFactory.BlobItem("idx/segments/2020/01/16/2300/meta.json", false, null)), - BlobsModelFactory.BlobHierarchyItem( - null, - BlobsModelFactory.BlobItem("idx/segments/2020/03/02/2300/meta.json", false, null)), - BlobsModelFactory.BlobHierarchyItem( - null, - BlobsModelFactory.BlobItem("idx/segments/2020/03/03/0000/meta.json", false, null)), - BlobsModelFactory.BlobHierarchyItem( - null, - BlobsModelFactory.BlobItem("idx/segments/2020/03/03/1800/meta.json", false, null)), - BlobsModelFactory.BlobHierarchyItem( - null, - BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2000/meta.json", false, null)), - BlobsModelFactory.BlobHierarchyItem( - null, - BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2200/meta.json", false, null)), - BlobsModelFactory.BlobHierarchyItem( - null, - BlobsModelFactory.BlobItem("idx/segments/2020/03/05/1700/meta.json", false, null)), - }); } } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs index ae4cf444f60cb..6b1d5fd1417c1 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs @@ -60,20 +60,5 @@ public async Task GetYearPathsTest() expectedYears.Enqueue("idx/segments/2023/"); Assert.AreEqual(expectedYears, years); } - - private static Task> GetYearsPathFuncAsync(string continuation, int? pageSizeHint) - => Task.FromResult(GetYearPathFunc(continuation, pageSizeHint)); - - private static Page GetYearPathFunc( - string continuation, - int? 
pageSizeHint) - => new BlobHierarchyItemPage(new List - { - BlobsModelFactory.BlobHierarchyItem("idx/segments/1601/", null), - BlobsModelFactory.BlobHierarchyItem("idx/segments/2019/", null), - BlobsModelFactory.BlobHierarchyItem("idx/segments/2020/", null), - BlobsModelFactory.BlobHierarchyItem("idx/segments/2022/", null), - BlobsModelFactory.BlobHierarchyItem("idx/segments/2023/", null), - }); } } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs index 70ff6d005ef74..6c282adecccb1 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs @@ -103,6 +103,54 @@ public async ValueTask DisposeAsync() } } + public static Task> GetYearsPathFuncAsync(string continuation, int? pageSizeHint) + => Task.FromResult(GetYearPathFunc(continuation, pageSizeHint)); + + public static Page GetYearPathFunc( + string continuation, + int? pageSizeHint) + => new BlobHierarchyItemPage(new List + { + BlobsModelFactory.BlobHierarchyItem("idx/segments/1601/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2019/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2020/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2022/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2023/", null), + }); + + public static Task> GetSegmentsInYearFuncAsync( + string continuation, + int? pageSizeHint) + => Task.FromResult(GetSegmentsInYearFunc(continuation, pageSizeHint)); + + public static Page GetSegmentsInYearFunc( + string continuation, + int? 
pageSizeHint) + => new BlobHierarchyItemPage(new List + { + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/01/16/2300/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/02/2300/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/0000/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/1800/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2000/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2200/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/05/1700/meta.json", false, null)), + }); + public class BlobHierarchyItemPage : Page { private List _items; diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs index 39b26a66611c0..80146be3cdfe4 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs @@ -3,10 +3,14 @@ using System; using System.Collections.Generic; +using System.IO; using System.Text; +using System.Text.Json; using System.Threading.Tasks; using Azure.Core; +using Azure.Core.TestFramework; using Azure.Storage.Blobs; +using Azure.Storage.Blobs.ChangeFeed.Models; using Azure.Storage.Blobs.Models; using Moq; using NUnit.Framework; @@ -19,5 +23,625 @@ public ChangeFeedTests(bool async) : base(async, null /* RecordedTestMode.Record /* to re-record */) { } + + [Test] + public async Task GetCursor() + { + // Arrange + Mock serviceClient = new Mock(MockBehavior.Strict); + Mock 
containerClient = new Mock(MockBehavior.Strict); + Mock blobClient = new Mock(MockBehavior.Strict); + Mock segmentFactory = new Mock(MockBehavior.Strict); + Mock segment = new Mock(MockBehavior.Strict); + + Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed"); + + serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny())).Returns(containerClient.Object); + containerClient.Setup(r => r.Uri).Returns(containerUri); + + if (IsAsync) + { + containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200))); + } + else + { + containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200))); + } + + containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); + + using FileStream stream = File.OpenRead($"Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}"); + BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); + Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200)); + + if (IsAsync) + { + blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse); + } + else + { + blobClient.Setup(r => r.Download()).Returns(downloadResponse); + } + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = + PageResponseEnumerator.CreateEnumerable(GetYearPathFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(pageable); + } + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYearFuncAsync); + + containerClient.Setup(r => 
r.GetBlobsByHierarchyAsync( + default, + default, + default, + It.IsAny(), + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = + PageResponseEnumerator.CreateEnumerable(GetSegmentsInYearFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + default, + It.IsAny(), + default)).Returns(pageable); + } + + segmentFactory.Setup(r => r.BuildSegment( + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny())) + .ReturnsAsync(segment.Object); + + long chunkIndex = 1; + long blockOffset = 2; + long eventIndex = 3; + ShardCursor shardCursor = new ShardCursor( + chunkIndex, + blockOffset, + eventIndex); + + DateTimeOffset segmentTime = new DateTimeOffset(2020, 1, 4, 17, 0, 0, TimeSpan.Zero); + int shardIndex = 0; + SegmentCursor segmentCursor = new SegmentCursor( + segmentTime, + new List + { + shardCursor + }, + shardIndex); + + segment.Setup(r => r.GetCursor()).Returns(segmentCursor); + + DateTimeOffset endDateTime = new DateTimeOffset(2020, 5, 6, 18, 0, 0, TimeSpan.Zero); + ChangeFeedCursor expectedCursor = new ChangeFeedCursor( + urlHash: containerUri.GetHashCode(), + endDateTime: endDateTime, + currentSegmentCursor: segmentCursor); + + ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(segmentFactory.Object); + + // Act + ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed( + IsAsync, + serviceClient.Object, + continuation: JsonSerializer.Serialize(expectedCursor)); + + ChangeFeedCursor actualCursor = changeFeed.GetCursor(); + + // Assert + Assert.AreEqual(expectedCursor.CursorVersion, actualCursor.CursorVersion); + Assert.AreEqual(expectedCursor.EndTime, actualCursor.EndTime); + Assert.AreEqual(expectedCursor.UrlHash, actualCursor.UrlHash); + + Assert.AreEqual(expectedCursor.CurrentSegmentCursor.SegmentTime, actualCursor.CurrentSegmentCursor.SegmentTime); + Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardIndex, actualCursor.CurrentSegmentCursor.ShardIndex); + 
Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors.Count, actualCursor.CurrentSegmentCursor.ShardCursors.Count); + + Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors[0].BlockOffset, actualCursor.CurrentSegmentCursor.ShardCursors[0].BlockOffset); + Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors[0].ChunkIndex, actualCursor.CurrentSegmentCursor.ShardCursors[0].ChunkIndex); + Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors[0].EventIndex, actualCursor.CurrentSegmentCursor.ShardCursors[0].EventIndex); + + serviceClient.Verify(r => r.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName)); + containerClient.Verify(r => r.Uri); + + if (IsAsync) + { + containerClient.Verify(r => r.ExistsAsync(default)); + } + else + { + containerClient.Verify(r => r.Exists(default)); + } + + containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath)); + + if (IsAsync) + { + blobClient.Verify(r => r.DownloadAsync()); + } + else + { + blobClient.Verify(r => r.Download()); + } + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)); + } + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + "idx/segments/2020/", + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + "idx/segments/2020/", + default)); + } + + segmentFactory.Verify(r => r.BuildSegment( + IsAsync, + containerClient.Object, + "idx/segments/2020/01/16/2300/meta.json", + It.Is( + r => r.SegmentTime == segmentTime + && r.ShardIndex == shardIndex + && r.ShardCursors.Count == 1 + && r.ShardCursors[0].BlockOffset == blockOffset + && r.ShardCursors[0].ChunkIndex == 
chunkIndex + && r.ShardCursors[0].EventIndex == eventIndex + ))); + + segment.Verify(r => r.GetCursor()); + } + + /// + /// This test has 8 total events, 4 segments, and 2 years. + /// We call ChangeFeed.GetPage() with a page size of 3, and then no page size, + /// resulting in two pages with 3 and 5 Events. + /// + /// + [Test] + public async Task GetPage() + { + // Arrange + int eventCount = 8; + int segmentCount = 4; + Mock serviceClient = new Mock(MockBehavior.Strict); + Mock containerClient = new Mock(MockBehavior.Strict); + Mock blobClient = new Mock(MockBehavior.Strict); + Mock segmentFactory = new Mock(MockBehavior.Strict); + Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed"); + + List> segments = new List>(); + for (int i = 0; i < segmentCount; i++) + { + segments.Add(new Mock(MockBehavior.Strict)); + } + + // ChangeFeedFactory.BuildChangeFeed() setups. + serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny())).Returns(containerClient.Object); + containerClient.SetupSequence(r => r.Uri) + .Returns(containerUri) + .Returns(containerUri); + + if (IsAsync) + { + containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200))); + } + else + { + containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200))); + } + + containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); + + using FileStream stream = File.OpenRead($"Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}"); + BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); + Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200)); + + if (IsAsync) + { + blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse); + } + else + { + blobClient.Setup(r => r.Download()).Returns(downloadResponse); + } + + if (IsAsync) + { + AsyncPageable asyncPageable = 
PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathShortFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = + PageResponseEnumerator.CreateEnumerable(GetYearsPathShortFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(pageable); + } + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2019FuncAsync); + AsyncPageable asyncPageable2 = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2020FuncAsync); + + containerClient.SetupSequence(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + It.IsAny(), + default)) + .Returns(asyncPageable) + .Returns(asyncPageable2); + } + else + { + Pageable pageable = + PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2019Func); + + Pageable pageable2 = + PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2020Func); + + containerClient.SetupSequence(r => r.GetBlobsByHierarchy( + default, + default, + default, + It.IsAny(), + default)) + .Returns(pageable) + .Returns(pageable2); + } + + segmentFactory.SetupSequence(r => r.BuildSegment( + It.IsAny(), + It.IsAny(), + It.IsAny(), + default)) + .Returns(Task.FromResult(segments[0].Object)) + .Returns(Task.FromResult(segments[1].Object)) + .Returns(Task.FromResult(segments[2].Object)) + .Returns(Task.FromResult(segments[3].Object)); + + List events = new List(); + for (int i = 0; i < eventCount; i++) + { + events.Add(new BlobChangeFeedEvent + { + Id = Guid.NewGuid() + }); + } + + segments[0].SetupSequence(r => r.HasNext()) + .Returns(false); + segments[1].SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(false); + segments[2].SetupSequence(r => r.HasNext()) + .Returns(false); + segments[3].SetupSequence(r => r.HasNext()) + 
.Returns(true) + .Returns(false); + + segments[0].SetupSequence(r => r.GetPage( + It.IsAny(), + It.IsAny(), + default)) + .Returns(Task.FromResult(new List + { + events[0], + events[1] + })); + + segments[1].SetupSequence(r => r.GetPage( + It.IsAny(), + It.IsAny(), + default)) + .Returns(Task.FromResult(new List + { + events[2] + })) + .Returns(Task.FromResult(new List + { + events[3] + })); + + segments[2].SetupSequence(r => r.GetPage( + It.IsAny(), + It.IsAny(), + default)) + .Returns(Task.FromResult(new List + { + events[4], + events[5] + })); + + segments[3].SetupSequence(r => r.GetPage( + It.IsAny(), + It.IsAny(), + default)) + .Returns(Task.FromResult(new List + { + events[6], + events[7] + })); + + long chunkIndex = 1; + long blockOffset = 2; + long eventIndex = 3; + ShardCursor shardCursor = new ShardCursor( + chunkIndex, + blockOffset, + eventIndex); + + DateTimeOffset segmentTime = new DateTimeOffset(2020, 1, 4, 17, 0, 0, TimeSpan.Zero); + int shardIndex = 0; + SegmentCursor segmentCursor = new SegmentCursor( + segmentTime, + new List + { + shardCursor + }, + shardIndex); + ChangeFeedCursor changeFeedCursor = new ChangeFeedCursor( + containerUri.GetHashCode(), + null, + segmentCursor); + + containerClient.SetupSequence(r => r.Uri) + .Returns(containerUri) + .Returns(containerUri); + + segments[1].Setup(r => r.GetCursor()).Returns(segmentCursor); + segments[3].Setup(r => r.GetCursor()).Returns(segmentCursor); + + + + ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(segmentFactory.Object); + ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed( + IsAsync, + serviceClient.Object); + + // Act + Page page0 = await changeFeed.GetPage(IsAsync, 3); + Page page1 = await changeFeed.GetPage(IsAsync); + + // Assert + Assert.AreEqual(JsonSerializer.Serialize(changeFeedCursor), page0.ContinuationToken); + + for (int i = 0; i < 3; i++) + { + Assert.AreEqual(events[i].Id, page0.Values[i].Id); + } + + 
Assert.AreEqual(JsonSerializer.Serialize(changeFeedCursor), page1.ContinuationToken); + + for (int i = 3; i < events.Count; i++) + { + Assert.AreEqual(events[i].Id, page1.Values[i - 3].Id); + } + + // ChangeFeedFactory.BuildChangeFeed() verifies + serviceClient.Verify(r => r.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName)); + containerClient.Verify(r => r.Uri); + + if (IsAsync) + { + containerClient.Verify(r => r.ExistsAsync(default)); + } + else + { + containerClient.Verify(r => r.Exists(default)); + } + + containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath)); + + if (IsAsync) + { + blobClient.Verify(r => r.DownloadAsync()); + } + else + { + blobClient.Verify(r => r.Download()); + } + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)); + } + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + "idx/segments/2019/", + default)); + + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + "idx/segments/2020/", + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + "idx/segments/2019/", + default)); + + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + "idx/segments/2020/", + default)); + } + + // ChangeFeeed.Next() verifies. 
+ segments[0].Verify(r => r.HasNext()); + segments[1].Verify(r => r.HasNext(), Times.Exactly(2)); + segments[2].Verify(r => r.HasNext()); + segments[3].Verify(r => r.HasNext(), Times.Exactly(3)); + + segments[0].Verify(r => r.GetPage( + IsAsync, + 3, + default)); + + segments[1].Verify(r => r.GetPage( + IsAsync, + 1, + default)); + + segments[1].Verify(r => r.GetPage( + IsAsync, + Constants.ChangeFeed.DefaultPageSize, + default)); + + segments[2].Verify(r => r.GetPage( + IsAsync, + Constants.ChangeFeed.DefaultPageSize - 1, + default)); + + segments[3].Verify(r => r.GetPage( + IsAsync, + Constants.ChangeFeed.DefaultPageSize - 3, + default)); + + segments[1].Verify(r => r.GetCursor()); + segments[3].Verify(r => r.GetCursor()); + + containerClient.Verify(r => r.Uri, Times.Exactly(2)); + } + + public static Task> GetYearsPathShortFuncAsync(string continuation, int? pageSizeHint) + => Task.FromResult(GetYearsPathShortFunc(continuation, pageSizeHint)); + + public static Page GetYearsPathShortFunc( + string continuation, + int? pageSizeHint) + => new BlobHierarchyItemPage(new List + { + BlobsModelFactory.BlobHierarchyItem("idx/segments/1601/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2019/", null), + BlobsModelFactory.BlobHierarchyItem("idx/segments/2020/", null) + }); + + public static Task> GetSegmentsInYear2019FuncAsync( + string continuation, + int? pageSizeHint) + => Task.FromResult(GetSegmentsInYear2019Func(continuation, pageSizeHint)); + + public static Page GetSegmentsInYear2019Func( + string continuation, + int? pageSizeHint) + => new BlobHierarchyItemPage(new List + { + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2019/03/02/2000/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2019/04/03/2200/meta.json", false, null)) + }); + + public static Task> GetSegmentsInYear2020FuncAsync( + string continuation, + int? 
pageSizeHint) + => Task.FromResult(GetSegmentsInYear2020Func(continuation, pageSizeHint)); + + public static Page GetSegmentsInYear2020Func( + string continuation, + int? pageSizeHint) + => new BlobHierarchyItemPage(new List + { + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2000/meta.json", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2200/meta.json", false, null)) + }); } } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/ChangeFeedManifest.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/ChangeFeedManifest.json new file mode 100644 index 0000000000000..501543e2903fc --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/Resources/ChangeFeedManifest.json @@ -0,0 +1,12 @@ +{ + "version": 0, + "lastConsumable": "2020-05-04T19:00:00.000Z", + "storageDiagnostics": { + "version": 0, + "lastModifiedTime": "2020-05-04T19:25:09.594Z", + "data": { + "aid": "a6b895a0-7006-0041-0049-22cadf06029a", + "lfz": "2020-04-29T06:00:00.000Z" + } + } +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs index 41d7b654d4d72..6ce43176bc0e2 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs @@ -6,6 +6,7 @@ using System.IO; using System.Threading.Tasks; using Azure.Core; +using Azure.Core.TestFramework; using Azure.Storage.Blobs.ChangeFeed.Models; using Azure.Storage.Blobs.Models; using Moq; @@ -56,7 +57,7 @@ public async Task GetCursor() using FileStream stream = File.OpenRead($"Resources{Path.DirectorySeparatorChar}{"SegmentManifest.json"}"); BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); - Response downloadResponse = 
Response.FromValue(blobDownloadInfo, new ResponseImplementation()); + Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200)); if (IsAsync) { @@ -158,7 +159,7 @@ public async Task GetPage() using FileStream stream = File.OpenRead($"Resources{Path.DirectorySeparatorChar}{"SegmentManifest.json"}"); BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); - Response downloadResponse = Response.FromValue(blobDownloadInfo, new ResponseImplementation()); + Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200)); if (IsAsync) { @@ -262,40 +263,5 @@ public async Task GetPage() shards[1].Verify(r => r.Next(IsAsync, default)); shards[1].Verify(r => r.HasNext()); } - - private class ResponseImplementation : Response - { - public override int Status => throw new NotImplementedException(); - - public override string ReasonPhrase => throw new NotImplementedException(); - - public override Stream ContentStream { get => throw new NotImplementedException(); set => throw new NotImplementedException(); } - public override string ClientRequestId { get => throw new NotImplementedException(); set => throw new NotImplementedException(); } - - public override void Dispose() - { - throw new NotImplementedException(); - } - - protected override bool ContainsHeader(string name) - { - throw new NotImplementedException(); - } - - protected override IEnumerable EnumerateHeaders() - { - throw new NotImplementedException(); - } - - protected override bool TryGetHeader(string name, out string value) - { - throw new NotImplementedException(); - } - - protected override bool TryGetHeaderValues(string name, out IEnumerable values) - { - throw new NotImplementedException(); - } - } } } diff --git a/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs b/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs index 1744fea65581b..78986bf5a8628 100644 --- 
a/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs +++ b/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs @@ -311,6 +311,7 @@ internal static class ChangeFeed public const string InitalizationSegment = "1601"; public const string MetaSegmentsPath = "meta/segments.json"; public const long ChunkBlockDownloadSize = MB; + public const int DefaultPageSize = 512; internal static class Event { From 4db0102192b0fcac1644c2df22484068913cde39 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Thu, 7 May 2020 10:55:12 -0700 Subject: [PATCH 15/30] Moved Chunk initalization logic to ChunkFactory --- .../src/Chunk.cs | 57 ++----------------- .../src/ChunkFactory.cs | 47 +++++++++++---- 2 files changed, 43 insertions(+), 61 deletions(-) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs index 30b63170c40f5..be6052338a5e8 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs @@ -3,7 +3,6 @@ using System; using System.Collections.Generic; -using System.IO; using System.Threading; using System.Threading.Tasks; using Azure.Storage.Blobs.ChangeFeed.Models; @@ -16,26 +15,11 @@ namespace Azure.Storage.Blobs.ChangeFeed /// internal class Chunk : IDisposable { - /// - /// Blob Client for downloading the Chunk. - /// - private readonly BlobClient _blobClient; - /// /// Avro Reader to parser the Events. /// private readonly AvroReader _avroReader; - /// - /// Data stream. - /// - private readonly Stream _dataStream; - - /// - /// Avro head stream. - /// - private readonly Stream _headStream; - /// /// The byte offset of the beginning of the current /// Block. 
@@ -48,40 +32,13 @@ internal class Chunk : IDisposable public virtual long EventIndex { get; private set; } public Chunk( - BlobContainerClient containerClient, - LazyLoadingBlobStreamFactory lazyLoadingBlobStreamFactory, - AvroReaderFactory avroReaderFactory, - string chunkPath, - long? blockOffset = default, - long? eventIndex = default) + AvroReader avroReader, + long blockOffset, + long eventIndex) { - _blobClient = containerClient.GetBlobClient(chunkPath); - BlockOffset = blockOffset ?? 0; - EventIndex = eventIndex ?? 0; - - _dataStream = lazyLoadingBlobStreamFactory.BuildLazyLoadingBlobStream( - _blobClient, - offset: BlockOffset, - blockSize: Constants.ChangeFeed.ChunkBlockDownloadSize); - - // We aren't starting from the beginning of the Chunk - if (BlockOffset != 0) - { - _headStream = lazyLoadingBlobStreamFactory.BuildLazyLoadingBlobStream( - _blobClient, - offset: 0, - blockSize: 3 * Constants.KB); - - _avroReader = avroReaderFactory.BuildAvroReader( - _dataStream, - _headStream, - BlockOffset, - EventIndex); - } - else - { - _avroReader = avroReaderFactory.BuildAvroReader(_dataStream); - } + _avroReader = avroReader; + BlockOffset = blockOffset; + EventIndex = eventIndex; } public virtual bool HasNext() @@ -106,8 +63,6 @@ public virtual async Task Next( public void Dispose() { - _dataStream.Dispose(); - _headStream.Dispose(); GC.SuppressFinalize(this); } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs index e51c79b22ed2b..6c9aa77e80ad1 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs @@ -1,9 +1,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. 
-using System; -using System.Collections.Generic; -using System.Text; +using System.IO; +using Azure.Storage.Internal.Avro; namespace Azure.Storage.Blobs.ChangeFeed { @@ -25,13 +24,41 @@ public virtual Chunk BuildChunk( string chunkPath, long? blockOffset = default, long? eventIndex = default) - => new Chunk( - containerClient, - _lazyLoadingBlobStreamFactory, - _avroReaderFactory, - chunkPath, - blockOffset, - eventIndex); + { + BlobClient blobClient = containerClient.GetBlobClient(chunkPath); + blockOffset ??= 0; + eventIndex ??= 0; + AvroReader avroReader; + + Stream dataStream = _lazyLoadingBlobStreamFactory.BuildLazyLoadingBlobStream( + blobClient, + offset: blockOffset.Value, + blockSize: Constants.ChangeFeed.ChunkBlockDownloadSize); + + // We aren't starting from the beginning of the Chunk + if (blockOffset != 0) + { + Stream headStream = _lazyLoadingBlobStreamFactory.BuildLazyLoadingBlobStream( + blobClient, + offset: 0, + blockSize: 3 * Constants.KB); + + avroReader = _avroReaderFactory.BuildAvroReader( + dataStream, + headStream, + blockOffset.Value, + eventIndex.Value); + } + else + { + avroReader = _avroReaderFactory.BuildAvroReader(dataStream); + } + + return new Chunk( + avroReader, + blockOffset.Value, + eventIndex.Value); + } /// /// Constructor for mocking. 
From 68d772f2bc01578d66413a1ebcdb5cf1245de5bf Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Thu, 7 May 2020 11:52:32 -0700 Subject: [PATCH 16/30] Re-recorded tests --- .../src/Chunk.cs | 7 +- .../src/Segment.cs | 1 - .../src/Shard.cs | 5 +- .../tests/BlobChangeFeedAsyncPagableTests.cs | 10 +- .../tests/ChangeFeedTestBase.cs | 2 +- .../GetSegmentsInYearTest.json | 4 + .../GetSegmentsInYearTestAsync.json | 4 + .../RoundDownToNearestHourTests.json | 4 + .../RoundDownToNearestHourTestsAsync.json | 4 + .../RoundDownToNearestYearTests.json | 4 + .../RoundDownToNearestYearTestsAsync.json | 4 + .../RoundUpToNearestHourTests.json | 4 + .../RoundUpToNearestHourTestsAsync.json | 4 + .../ToDateTimeOffsetTests.json | 4 + .../ToDateTimeOffsetTestsAsync.json | 4 + .../GetYearPathsTest.json | 4 + .../GetYearPathsTestAsync.json | 4 + .../ChangeFeedTests/GetCursor.json | 4 + .../ChangeFeedTests/GetCursorAsync.json | 4 + .../ChangeFeedTests/GetPage.json | 4 + .../ChangeFeedTests/GetPageAsync.json | 4 + .../tests/ShardTests.cs | 126 +++++++++++++----- .../src/Shared/StorageVersionExtensions.cs | 2 +- 23 files changed, 167 insertions(+), 50 deletions(-) create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTest.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTestAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTests.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTestsAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTests.json create mode 100644 
sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTestsAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTests.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTestsAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTests.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTestsAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTest.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTestAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursor.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursorAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPage.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPageAsync.json diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs index be6052338a5e8..37262bf3cc5fd 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Chunk.cs @@ -13,7 +13,7 @@ namespace Azure.Storage.Blobs.ChangeFeed /// /// Chunk. /// - internal class Chunk : IDisposable + internal class Chunk { /// /// Avro Reader to parser the Events. 
@@ -61,11 +61,6 @@ public virtual async Task Next( return new BlobChangeFeedEvent(result); } - public void Dispose() - { - GC.SuppressFinalize(this); - } - /// /// Constructor for mocking. Do not use. /// diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs index 3c099e722179f..788d784307403 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs @@ -3,7 +3,6 @@ using System; using System.Collections.Generic; -using System.Text.Json; using System.Threading; using System.Threading.Tasks; using Azure.Storage.Blobs.ChangeFeed.Models; diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs index fc821c24adcbe..ea9a26f3e1fc5 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs @@ -10,7 +10,7 @@ namespace Azure.Storage.Blobs.ChangeFeed { - internal class Shard : IDisposable + internal class Shard { /// /// Container Client for listing Chunks. @@ -79,9 +79,6 @@ public virtual async Task Next( return changeFeedEvent; } - /// - public void Dispose() => _currentChunk.Dispose(); - /// /// Constructor for use by . 
/// diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs index 2da82aa96d502..d188628f4bb20 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/BlobChangeFeedAsyncPagableTests.cs @@ -15,16 +15,12 @@ namespace Azure.Storage.Blobs.ChangeFeed.Tests public class BlobChangeFeedAsyncPagableTests : ChangeFeedTestBase { public BlobChangeFeedAsyncPagableTests(bool async) - : base(async, RecordedTestMode.Live /* RecordedTestMode.Record /* to re-record */) + : base(async, null /* RecordedTestMode.Record /* to re-record */) { } - //TODO better cursor tests - //TODO start and end time tests - //TODO page size tests - [Test] - //[Ignore("")] + [Ignore("")] public async Task Test() { BlobServiceClient service = GetServiceClient_SharedKey(); @@ -61,7 +57,7 @@ IAsyncEnumerator> asyncEnumerator } [Test] - //[Ignore("")] + [Ignore("")] public async Task CursorTest() { BlobServiceClient service = GetServiceClient_SharedKey(); diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs index 6c282adecccb1..aefdc00ff9894 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs @@ -18,7 +18,7 @@ public class ChangeFeedTestBase : StorageTestBase public ChangeFeedTestBase(bool async) : this(async, null) { } public ChangeFeedTestBase(bool async, RecordedTestMode? 
mode = null) - : base(async, RecordedTestMode.Live) + : base(async, mode) { } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTest.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTest.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTest.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTestAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTestAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/GetSegmentsInYearTestAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTests.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTests.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTests.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTestsAsync.json 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTestsAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestHourTestsAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTests.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTests.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTests.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTestsAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTestsAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundDownToNearestYearTestsAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTests.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTests.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTests.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTestsAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTestsAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/RoundUpToNearestHourTestsAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTests.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTests.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTests.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTestsAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTestsAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/BlobChangeFeedExtensionsTests/ToDateTimeOffsetTestsAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTest.json 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTest.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTest.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTestAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTestAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedFactoryTests/GetYearPathsTestAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursor.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursor.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursor.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursorAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursorAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetCursorAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPage.json 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPage.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPage.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPageAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPageAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/GetPageAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs index 1808b7d73a899..ebfbb8e70771b 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs @@ -123,7 +123,7 @@ public async Task HasNext_False() { // Arrange string shardPath = "shardPath"; - long chunkIndex = 4; + long chunkIndex = 5; long blockOffset = 100; long eventIndex = 200; @@ -204,7 +204,7 @@ public async Task HasNext_False() chunkFactory.Verify(r => r.BuildChunk( containerClient.Object, - "chunk4", + "chunk5", blockOffset, eventIndex)); @@ -305,7 +305,7 @@ public async Task HasNext_CurrentChunkHasNext() { // Arrange string shardPath = "shardPath"; - long chunkIndex = 4; + long chunkIndex = 5; long blockOffset = 100; long eventIndex = 200; @@ -386,7 +386,7 @@ public async Task HasNext_CurrentChunkHasNext() chunkFactory.Verify(r => r.BuildChunk( containerClient.Object, - "chunk4", + "chunk5", blockOffset, eventIndex)); @@ -397,11 +397,21 @@ public async Task HasNext_CurrentChunkHasNext() public async Task Next() { // Arrange - 
Guid eventId = Guid.NewGuid(); - BlobChangeFeedEvent expectedChangeFeedEvent = new BlobChangeFeedEvent + int chunkCount = 4; + int eventCount = 8; + Mock containerClient = new Mock(MockBehavior.Strict); + Mock chunkFactory = new Mock(MockBehavior.Strict); + List> chunks = new List>(); + + List expectedChangeFeedEvents = new List(); + for (int i = 0; i < eventCount; i++) { - Id = eventId - }; + chunks.Add(new Mock(MockBehavior.Strict)); + expectedChangeFeedEvents.Add(new BlobChangeFeedEvent + { + Id = Guid.NewGuid() + }); + } string shardPath = "shardPath"; long chunkIndex = 2; @@ -413,10 +423,6 @@ public async Task Next() blockOffset, eventIndex); - Mock containerClient = new Mock(MockBehavior.Strict); - Mock chunkFactory = new Mock(MockBehavior.Strict); - Mock chunk = new Mock(MockBehavior.Strict); - if (IsAsync) { AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetChunkPagesFuncAsync); @@ -440,24 +446,46 @@ public async Task Next() default)).Returns(pageable); } - chunkFactory.Setup(r => r.BuildChunk( + chunkFactory.SetupSequence(r => r.BuildChunk( It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .Returns(chunk.Object); + .Returns(chunks[0].Object) + .Returns(chunks[1].Object) + .Returns(chunks[2].Object) + .Returns(chunks[3].Object); - chunk.Setup(r => r.Next( - It.IsAny(), - default)) - .Returns(Task.FromResult(expectedChangeFeedEvent)); + chunks[0].SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(false); - chunk.SetupSequence(r => r.HasNext()) + chunks[1].SetupSequence(r => r.HasNext()) .Returns(true) - .Returns(true); + .Returns(false); - chunk.Setup(r => r.BlockOffset).Returns(blockOffset); - chunk.Setup(r => r.EventIndex).Returns(eventIndex); + chunks[2].SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(false); + + chunks[3].SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(true) + .Returns(true) + .Returns(false); + + for (int i = 0; i < chunkCount; i++) + { + + chunks[i].SetupSequence(r => 
r.Next( + It.IsAny(), + default)) + .Returns(Task.FromResult(expectedChangeFeedEvents[2 * i])) + .Returns(Task.FromResult(expectedChangeFeedEvents[2 * i + 1])); + } + + chunks[2].Setup(r => r.BlockOffset).Returns(blockOffset); + chunks[2].Setup(r => r.EventIndex).Returns(eventIndex); ShardFactory shardFactory = new ShardFactory(chunkFactory.Object); @@ -469,12 +497,25 @@ public async Task Next() shardCursor) .ConfigureAwait(false); - BlobChangeFeedEvent changeFeedEvent = await shard.Next(IsAsync); + List changeFeedEvents = new List(); + for (int i = 0; i < 4; i++) + { + changeFeedEvents.Add(await shard.Next(IsAsync)); + } ShardCursor cursor = shard.GetCursor(); + for (int i = 0; i < 4; i++) + { + changeFeedEvents.Add(await shard.Next(IsAsync)); + } // Assert - Assert.AreEqual(eventId, changeFeedEvent.Id); - Assert.AreEqual(2, cursor.ChunkIndex); + for (int i = 0; i < eventCount; i++) + { + Assert.AreEqual(expectedChangeFeedEvents[i].Id, changeFeedEvents[i].Id); + } + + Assert.AreEqual(4, cursor.ChunkIndex); + Assert.AreEqual(eventIndex, cursor.EventIndex); if (IsAsync) { @@ -500,12 +541,34 @@ public async Task Next() "chunk2", blockOffset, eventIndex)); + chunkFactory.Verify(r => r.BuildChunk( + containerClient.Object, + "chunk3", + default, + default)); + chunkFactory.Verify(r => r.BuildChunk( + containerClient.Object, + "chunk4", + default, + default)); + chunkFactory.Verify(r => r.BuildChunk( + containerClient.Object, + "chunk5", + default, + default)); - chunk.Verify(r => r.HasNext()); - chunk.Verify(r => r.Next(IsAsync, default)); - chunk.Verify(r => r.HasNext()); - chunk.Verify(r => r.BlockOffset); - chunk.Verify(r => r.EventIndex); + for (int i = 0; i < chunkCount; i++) + { + chunks[i].Verify(r => r.Next(IsAsync, default), Times.Exactly(2)); + } + + chunks[0].Verify(r => r.HasNext(), Times.Exactly(2)); + chunks[1].Verify(r => r.HasNext(), Times.Exactly(2)); + chunks[2].Verify(r => r.HasNext(), Times.Exactly(2)); + chunks[3].Verify(r => r.HasNext(), 
Times.Exactly(4)); + + chunks[2].Verify(r => r.BlockOffset); + chunks[2].Verify(r => r.EventIndex); } private static Task> GetChunkPagesFuncAsync( @@ -533,6 +596,9 @@ private static Page GetChunkPagesFunc( BlobsModelFactory.BlobHierarchyItem( null, BlobsModelFactory.BlobItem("chunk4", false, null)), + BlobsModelFactory.BlobHierarchyItem( + null, + BlobsModelFactory.BlobItem("chunk5", false, null)) }); } } diff --git a/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs b/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs index 9de33dc548600..ff480ae550a65 100644 --- a/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs +++ b/sdk/storage/Azure.Storage.Common/src/Shared/StorageVersionExtensions.cs @@ -51,7 +51,7 @@ public static string ToVersionString(this ServiceVersion version) => ServiceVersion.V2019_02_02 => "2019-02-02", ServiceVersion.V2019_07_07 => "2019-07-07", // TODO this is temporary until 73 goes to stage. - ServiceVersion.V2019_12_12 => "2019-10-10", + ServiceVersion.V2019_12_12 => "2019-12-12", #elif QueueSDK // Queues just bumped the version number without changing the swagger ServiceVersion.V2019_02_02 => "2018-11-09", From 4757ce3ca0d3a57bf233ba95a02305087f6463af Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Thu, 7 May 2020 13:25:27 -0700 Subject: [PATCH 17/30] Fixed CI --- .../tests/ChangeFeedTests.cs | 11 +++++++---- .../tests/SegmentTests.cs | 7 +++++-- .../tests/AvroReaderTests.cs | 4 +++- 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs index 80146be3cdfe4..5fc116352101b 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs @@ -4,6 +4,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Reflection; 
using System.Text; using System.Text.Json; using System.Threading.Tasks; @@ -50,7 +51,8 @@ public async Task GetCursor() containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); - using FileStream stream = File.OpenRead($"Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}"); + using FileStream stream = File.OpenRead( + $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}"); BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200)); @@ -140,7 +142,7 @@ public async Task GetCursor() DateTimeOffset endDateTime = new DateTimeOffset(2020, 5, 6, 18, 0, 0, TimeSpan.Zero); ChangeFeedCursor expectedCursor = new ChangeFeedCursor( - urlHash: containerUri.GetHashCode(), + urlHash: containerUri.ToString().GetHashCode(), endDateTime: endDateTime, currentSegmentCursor: segmentCursor); @@ -285,7 +287,8 @@ public async Task GetPage() containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); - using FileStream stream = File.OpenRead($"Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}"); + using FileStream stream = File.OpenRead( + $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}"); BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200)); @@ -445,7 +448,7 @@ public async Task GetPage() }, shardIndex); ChangeFeedCursor changeFeedCursor = new ChangeFeedCursor( - containerUri.GetHashCode(), + containerUri.ToString().GetHashCode(), null, segmentCursor); diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs index 6ce43176bc0e2..5849e39e46f75 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs @@ -4,6 +4,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Reflection; using System.Threading.Tasks; using Azure.Core; using Azure.Core.TestFramework; @@ -55,7 +56,8 @@ public async Task GetCursor() containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); - using FileStream stream = File.OpenRead($"Resources{Path.DirectorySeparatorChar}{"SegmentManifest.json"}"); + using FileStream stream = File.OpenRead( + $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"SegmentManifest.json"}"); BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200)); @@ -157,7 +159,8 @@ public async Task GetPage() containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); - using FileStream stream = File.OpenRead($"Resources{Path.DirectorySeparatorChar}{"SegmentManifest.json"}"); + using FileStream stream = File.OpenRead( + $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"SegmentManifest.json"}"); BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200)); diff --git a/sdk/storage/Azure.Storage.Internal.Avro/tests/AvroReaderTests.cs b/sdk/storage/Azure.Storage.Internal.Avro/tests/AvroReaderTests.cs index 944fee588e246..92d6cc412d675 100644 --- a/sdk/storage/Azure.Storage.Internal.Avro/tests/AvroReaderTests.cs +++ 
b/sdk/storage/Azure.Storage.Internal.Avro/tests/AvroReaderTests.cs @@ -4,6 +4,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Reflection; using System.Text; using System.Threading.Tasks; using NUnit.Framework; @@ -48,7 +49,8 @@ public async Task Tests() foreach (TestCase testCase in testCases) { // Arrange - using FileStream stream = File.OpenRead($"Resources{Path.DirectorySeparatorChar}{testCase.Path}"); + using FileStream stream = File.OpenRead( + $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{testCase.Path}"); AvroReader avroReader = new AvroReader(stream); // Act From 34e837e0698259d982b6817e86a92490ca5ed9bf Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Thu, 7 May 2020 14:02:17 -0700 Subject: [PATCH 18/30] PR comments --- .../src/ChangeFeed.cs | 1 - .../src/ChangeFeedFactory.cs | 3 +-- .../src/LazyLoadingBlobStream.cs | 1 - .../src/Models/BlobChangeFeedEventPage.cs | 14 +------------- .../src/Models/BlobChangeFeedModelFactory.cs | 5 +---- .../src/SegmentFactory.cs | 1 - .../tests/ChangeFeedTests.cs | 7 +++++-- .../tests/ChunkTests.cs | 10 ++++++++++ .../tests/LazyLoadingBlobStreamTests.cs | 8 +++++++- .../tests/SegmentTests.cs | 3 +++ .../tests/ShardTests.cs | 17 +++++++++++++++++ 11 files changed, 45 insertions(+), 25 deletions(-) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs index 877268f8505f2..98ea9ef23893a 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs @@ -108,7 +108,6 @@ public async Task> GetPage( while (blobChangeFeedEvents.Count < pageSize && HasNext()) { - //TODO what if segment doesn't have a page size worth of data? 
List newEvents = await _currentSegment.GetPage( async, remainingEvents, diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs index 6ec3584abbe71..d0e6465925ced 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs @@ -71,8 +71,7 @@ public async Task BuildChangeFeed( if (!changeFeedContainerExists) { - //TODO improve this error message - throw new ArgumentException("Change Feed hasn't been enabled on this account, or is current being enabled."); + throw new ArgumentException("Change Feed hasn't been enabled on this account, or is currently being enabled."); } // Get last consumable diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs index c9134848dc074..9d2ac838acd1b 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/LazyLoadingBlobStream.cs @@ -150,7 +150,6 @@ private async Task ReadInternal( if (copiedBytes == 0) { // We hit the end of the blob with the last download call. - //TODO what if the blob is growing? 
if (_offset == _blobLength) { return totalCopiedBytes; diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs index 93a259896ef86..98bab185c172d 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs @@ -21,17 +21,5 @@ public BlobChangeFeedEventPage(List events, string continua Values = events; ContinuationToken = continuationToken; } - - // public BlobChangeFeedEventPage(Response raw, List data) - // { - // _raw = raw; - // ContinuationToken = null; - // var changes = new List(); - // foreach (GenericRecord value in data) - // { - // changes.Add(new BlobChangeFeedEvent(value)); - // } - // Values = changes; - // } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs index 5ee16ae607562..fd571354030ee 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedModelFactory.cs @@ -2,12 +2,9 @@ // Licensed under the MIT License. using System; -using System.Collections.Generic; -using System.Text; -using Azure.Storage.Blobs.ChangeFeed.Models; using Azure.Storage.Blobs.Models; -namespace Azure.Storage.ChangeFeed.Models +namespace Azure.Storage.Blobs.ChangeFeed.Models { /// /// BlobChangeFeedModelFactory for building mock objects. 
diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs index 93d08e9462a3c..c40bbcfb90b95 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs @@ -70,7 +70,6 @@ public virtual async Task BuildSegment( int i = 0; foreach (JsonElement shardJsonElement in jsonManifest.RootElement.GetProperty("chunkFilePaths").EnumerateArray()) { - //TODO cleanup this line string shardPath = shardJsonElement.ToString().Substring("$blobchangefeed/".Length); Shard shard = await _shardFactory.BuildShard( async, diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs index 5fc116352101b..269f093c3385f 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs @@ -25,6 +25,10 @@ public ChangeFeedTests(bool async) { } + /// + /// Tests building a ChangeFeed with a ChangeFeedCursor, and then calling ChangeFeed.GetCursor() + /// and making sure the cursors match. + /// [Test] public async Task GetCursor() { @@ -248,10 +252,9 @@ public async Task GetCursor() /// /// This test has 8 total events, 4 segments, and 2 years. - /// We call ChangeFeed.GetPage() with a page size of 3, and then no page size, + /// We call ChangeFeed.GetPage() with a page size of 3, and then again with no page size, /// resulting in two pages with 3 and 5 Events. 
/// - /// [Test] public async Task GetPage() { diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs index d3c178895fb49..b1dccce4bf737 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs @@ -22,6 +22,9 @@ public ChunkTests(bool async) { } + /// + /// Tests Chunk.HasNext() when the underlying AvroReader.HasNext() returns true. + /// [Test] public void HasNext_True() { @@ -65,6 +68,10 @@ public void HasNext_True() avroReader.Verify(r => r.HasNext()); } + + /// + /// Tests Chunk.HasNext() when the underlying AvroReader.HasNext() returns false. + /// [Test] public void HasNext_False() { @@ -108,6 +115,9 @@ public void HasNext_False() avroReader.Verify(r => r.HasNext()); } + /// + /// Tests Chunk.Next() and the BlobChangeFeedEvent and BlobChangeFeedEventData constructors. + /// [Test] public async Task Next() { diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/LazyLoadingBlobStreamTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/LazyLoadingBlobStreamTests.cs index b3423a65fe220..c23211c41a1cf 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/LazyLoadingBlobStreamTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/LazyLoadingBlobStreamTests.cs @@ -18,6 +18,9 @@ public LazyLoadingBlobStreamTests(bool async) { } + /// + /// Tests Read() with various sized Reads(). + /// [Test] public async Task ReadAsync() { @@ -52,6 +55,9 @@ public async Task ReadAsync() TestHelper.AssertSequenceEqual(exectedData, actualData); } + /// + /// Tests LazyBlobStream parameter validation. 
+ /// [Test] public async Task ReadAsync_InvalidParameterTests() { @@ -89,4 +95,4 @@ await TestHelper.AssertExpectedExceptionAsync( e.Message)); } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs index 5849e39e46f75..fdfefcd969909 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs @@ -22,6 +22,9 @@ public SegmentTests(bool async) { } + /// + /// Test building a Segment with a SegmentCursor, and then calling Segment.GetCursor(). + /// [Test] public async Task GetCursor() { diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs index ebfbb8e70771b..757fb143fcf54 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs @@ -20,6 +20,9 @@ public ShardTests(bool async) { } + /// + /// Tests creating a Shard with a ShardCursor, and then calling Shard.GetCursor(). + /// [Test] public async Task GetCursor() { @@ -118,6 +121,9 @@ public async Task GetCursor() chunk.Verify(r => r.EventIndex); } + /// + /// Tests Shard.HasNext(). + /// [Test] public async Task HasNext_False() { @@ -211,6 +217,9 @@ public async Task HasNext_False() chunk.Verify(r => r.HasNext()); } + /// + /// Tests Shard.HasNext(). + /// [Test] public async Task HasNext_ChunksLeft() { @@ -300,6 +309,9 @@ public async Task HasNext_ChunksLeft() eventIndex)); } + /// + /// Tests Shard.HasNext(). + /// [Test] public async Task HasNext_CurrentChunkHasNext() { @@ -393,6 +405,11 @@ public async Task HasNext_CurrentChunkHasNext() chunk.Verify(r => r.HasNext()); } + /// + /// In this test, the Shard has 4 Chunks with 2 Events in each Chunk. 
+ /// We call ShardFactory.BuildShard() with a ShardCursor, to create the Shard, + /// Shard.Next() 4 times, Shard.GetCursor(), and then Shard.Next 4 times. + /// [Test] public async Task Next() { From 442c07af5b6428baae564b90f74a711d74ebf113 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Fri, 8 May 2020 14:33:19 -0700 Subject: [PATCH 19/30] Filled out readme --- .../Azure.Storage.Blobs.ChangeFeed/README.md | 83 +++++++++++++++++-- 1 file changed, 77 insertions(+), 6 deletions(-) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md index 293260514c432..61fd090d01729 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md @@ -1,20 +1,91 @@ # Azure Storage Blobs Change Feed client library for .NET -> Server Version: 2019-07-07 +> Server Version: 2019-12-12 + +The purpose of the change feed is to provide transaction logs of all the changes that occur to +the blobs and the blob metadata in your storage account. The change feed provides ordered, +guaranteed, durable, immutable, read-only log of these changes. Client applications can read these +logs at any time. The change feed enables you to build efficient and scalable solutions that +process change events that occur in your Blob Storage account at a low cost. + +[Source code][source] | [Product documentation][product_docs] + ## Getting started -- TODO + +### Install the package +- TODO after we have released. + +### Prerequisites + +You need an [Azure subscription][azure_sub] and a +[Storage Account][storage_account_docs] to use this package. + +To create a new Storage Account, you can use the [Azure Portal][storage_account_create_portal], +[Azure PowerShell][storage_account_create_ps], or the [Azure CLI][storage_account_create_cli]. 
+Here's an example using the Azure CLI: + +```Powershell +az storage account create --name MyStorageAccount --resource-group MyResourceGroup --location westus --sku Standard_LRS +``` ## Key concepts -- TODO + +The change feed is stored as blobs in a special container in your storage account at standard blob +pricing cost. You can control the retention period of these files based on your requirements +(See the conditions of the current release). Change events are appended to the change feed as records +in the Apache Avro format specification: a compact, fast, binary format that provides rich data structures +with inline schema. This format is widely used in the Hadoop ecosystem, Stream Analytics, and Azure Data +Factory. + +You can process these logs incrementally or in-full. Any number of client applications can independently +read the change feed, in parallel, and at their own pace. Analytics applications such as Apache Drill or +Apache Spark can consume logs directly as Avro files, which let you process them at a low-cost, with +high-bandwidth, and without having to write a custom application. ## Examples - TODO ## Troubleshooting -- TODO +All Blob service operations will throw a +[RequestFailedException][RequestFailedException] on failure with +helpful [`ErrorCode`s][error_codes]. Many of these errors are recoverable. ## Next steps -- TODO + +Get started with our [Change Feed samples][samples]: + +1. [Hello World](samples/Sample01a_HelloWorld.cs): Get changes that have occured in your storage account (or [asynchronously](samples/Sample01b_HelloWorldAsync.cs)) +2. [Auth](samples/Sample02_Auth.cs): Authenticate with connection strings, public access, shared keys, shared access signatures, and Azure Active Directory. + ## Contributing -- TODO \ No newline at end of file + +See the [Storage CONTRIBUTING.md][storage_contrib] for details on building, +testing, and contributing to this library. + +This project welcomes contributions and suggestions. 
Most contributions require +you to agree to a Contributor License Agreement (CLA) declaring that you have +the right to, and actually do, grant us the rights to use your contribution. For +details, visit [cla.microsoft.com][cla]. + +This project has adopted the [Microsoft Open Source Code of Conduct][coc]. +For more information see the [Code of Conduct FAQ][coc_faq] +or contact [opencode@microsoft.com][coc_contact] with any +additional questions or comments. + + +[source]: https://github.com/Azure/azure-sdk-for-net/tree/master/sdk/storage/Azure.Storage.Blobs/srcs +[product_docs]: https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-change-feed +[azure_sub]: https://azure.microsoft.com/free/ +[storage_account_docs]: https://docs.microsoft.com/azure/storage/common/storage-account-overview +[storage_account_create_ps]: https://docs.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-powershell +[storage_account_create_cli]: https://docs.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-cli +[storage_account_create_portal]: https://docs.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-portal +[RequestFailedException]: https://github.com/Azure/azure-sdk-for-net/tree/master/sdk/core/Azure.Core/src/RequestFailedException.cs +[error_codes]: https://docs.microsoft.com/rest/api/storageservices/blob-service-error-codes +[samples]: samples/ +[storage_contrib]: ../CONTRIBUTING.md +[cla]: https://cla.microsoft.com +[coc]: https://opensource.microsoft.com/codeofconduct/ +[coc_faq]: https://opensource.microsoft.com/codeofconduct/faq/ +[coc_contact]: mailto:opencode@microsoft.com \ No newline at end of file From d50a2b70768420b151dad6bebb90899a613cc962 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Fri, 8 May 2020 15:29:33 -0700 Subject: [PATCH 20/30] Added samples --- .../Azure.Storage.Blobs.ChangeFeed/README.md | 54 +++++++++- 
.../samples/Sample01a_HelloWorld.cs | 96 ++++++++++++++++- .../samples/Sample01b_HelloWorldAsync.cs | 102 +++++++++++++++++- .../samples/Sample02_Auth.cs | 26 ----- 4 files changed, 242 insertions(+), 36 deletions(-) delete mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample02_Auth.cs diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md index 61fd090d01729..fb6d333f06b0d 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md @@ -43,7 +43,59 @@ Apache Spark can consume logs directly as Avro files, which let you process them high-bandwidth, and without having to write a custom application. ## Examples -- TODO + +### Get all events in the Change Feed +```C# Snippet:SampleSnippetsChangeFeed_GetAllEvents +// Get all the events in the change feed. +List changeFeedEvents = new List(); +await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync()) +{ + changeFeedEvents.Add(changeFeedEvent); +} +``` + +### Get events between a start and end time +```C# Snippet:SampleSnippetsChangeFeed_GetEventsBetweenStartAndEndTime +// Create the start and end time. The change feed client will round start time down to +// the nearest hour, and round endTime up to the next hour if you provide DateTimeOffsets +// with minutes and seconds. +DateTimeOffset startTime = new DateTimeOffset(2017, 3, 2, 15, 0, 0, TimeSpan.Zero); +DateTimeOffset endTime = new DateTimeOffset(2020, 10, 7, 2, 0, 0, TimeSpan.Zero); + +// You can also provide just a start or end time. 
+await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync( + start: startTime, + end: endTime)) +{ + changeFeedEvents.Add(changeFeedEvent); +} +``` + +### Resume with cursor +```C# Snippet:SampleSnippetsChangeFeed_ResumeWithCursor +IAsyncEnumerator> enumerator = changeFeedClient + .GetChangesAsync() + .AsPages(pageSizeHint: 10) + .GetAsyncEnumerator(); + +await enumerator.MoveNextAsync(); + +foreach (BlobChangeFeedEvent changeFeedEvent in enumerator.Current.Values) +{ + changeFeedEvents.Add(changeFeedEvent); +} + +// get the change feed cursor. The cursor is not required to get each page of events, +// it is intended to be saved and used to resume iterating at a later date. +string cursor = enumerator.Current.ContinuationToken; + +// Resume iterating from the pervious position with the cursor. +await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync( + continuation: cursor)) +{ + changeFeedEvents.Add(changeFeedEvent); +} +``` ## Troubleshooting All Blob service operations will throw a diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs index 3968fd12b0a91..fcf9da2a85f30 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01a_HelloWorld.cs @@ -6,6 +6,7 @@ using System.IO; using System.Linq; using System.Reflection.Metadata.Ecma335; +using Azure.Storage.Blobs.ChangeFeed.Models; using NUnit.Framework; namespace Azure.Storage.Blobs.ChangeFeed.Samples @@ -16,12 +17,99 @@ namespace Azure.Storage.Blobs.ChangeFeed.Samples public class Sample01a_HelloWorld : SampleTest { /// - /// Sample sample. + /// Download every event in the change feed. /// [Test] - public void SampleSample() + public void ChangeFeed() { - Assert.AreEqual(1, 1); + // Get a connection string to our Azure Storage account. 
+ string connectionString = ConnectionString; + + // Get a new blob service client. + BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString); + + // Get a new change feed client. + BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient(); + + // Get all the events in the change feed. + List changeFeedEvents = new List(); + foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChanges()) + { + changeFeedEvents.Add(changeFeedEvent); + } + } + + /// + /// Download change feed events between a start and end time. + /// + [Test] + public void ChangeFeedBetweenDates() + { + // Get a connection string to our Azure Storage account. + string connectionString = ConnectionString; + + // Get a new blob service client. + BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString); + + // Get a new change feed client. + BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient(); + List changeFeedEvents = new List(); + + // Create the start and end time. The change feed client will round start time down to + // the nearest hour, and round endTime up to the next hour if you provide DateTimeOffsets + // with minutes and seconds. + DateTimeOffset startTime = new DateTimeOffset(2017, 3, 2, 15, 0, 0, TimeSpan.Zero); + DateTimeOffset endTime = new DateTimeOffset(2020, 10, 7, 2, 0, 0, TimeSpan.Zero); + + // You can also provide just a start or end time. + foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChanges( + start: startTime, + end: endTime)) + { + changeFeedEvents.Add(changeFeedEvent); + } + } + + /// + /// You can use the change feed cursor to resume iterating throw the change feed + /// at a later time. + /// + [Test] + public void ChangeFeedResumeWithCursor() + { + // Get a connection string to our Azure Storage account. + string connectionString = ConnectionString; + + // Get a new blob service client. 
+ BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString); + + // Get a new change feed client. + BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient(); + List changeFeedEvents = new List(); + + IEnumerator> enumerator = changeFeedClient + .GetChanges() + .AsPages(pageSizeHint: 10) + .GetEnumerator(); + ; + + enumerator.MoveNext(); + + foreach (BlobChangeFeedEvent changeFeedEvent in enumerator.Current.Values) + { + changeFeedEvents.Add(changeFeedEvent); + } + + // get the change feed cursor. The cursor is not required to get each page of events, + // it is intended to be saved and used to resume iterating at a later date. + string cursor = enumerator.Current.ContinuationToken; + + // Resume iterating from the previous position with the cursor. + foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChanges( + continuation: cursor)) + { + changeFeedEvents.Add(changeFeedEvent); + } } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs index aaea9a654464c..3dc4f738969ac 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample01b_HelloWorldAsync.cs @@ -7,6 +7,7 @@ using System.Linq; using System.Threading.Tasks; using Azure.Storage; +using Azure.Storage.Blobs.ChangeFeed.Models; using NUnit.Framework; namespace Azure.Storage.Blobs.ChangeFeed.Samples @@ -17,13 +18,104 @@ namespace Azure.Storage.Blobs.ChangeFeed.Samples public class Sample01b_HelloWorldAsync : SampleTest { /// - /// Sample sample. + /// Download every event in the change feed. /// [Test] - public async Task SampleSample() + public async Task ChangeFeedAsync() { - await Task.CompletedTask; - Assert.AreEqual(1, 1); + // Get a connection string to our Azure Storage account.
+ string connectionString = ConnectionString; + + // Get a new blob service client. + BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString); + + // Get a new change feed client. + BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient(); + + #region Snippet:SampleSnippetsChangeFeed_GetAllEvents + // Get all the events in the change feed. + List changeFeedEvents = new List(); + await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync()) + { + changeFeedEvents.Add(changeFeedEvent); + } + #endregion + } + + /// + /// Download change feed events between a start and end time. + /// + [Test] + public async Task ChangeFeedBetweenDatesAsync() + { + // Get a connection string to our Azure Storage account. + string connectionString = ConnectionString; + + // Get a new blob service client. + BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString); + + // Get a new change feed client. + BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient(); + List changeFeedEvents = new List(); + + #region Snippet:SampleSnippetsChangeFeed_GetEventsBetweenStartAndEndTime + // Create the start and end time. The change feed client will round start time down to + // the nearest hour, and round endTime up to the next hour if you provide DateTimeOffsets + // with minutes and seconds. + DateTimeOffset startTime = new DateTimeOffset(2017, 3, 2, 15, 0, 0, TimeSpan.Zero); + DateTimeOffset endTime = new DateTimeOffset(2020, 10, 7, 2, 0, 0, TimeSpan.Zero); + + // You can also provide just a start or end time. + await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync( + start: startTime, + end: endTime)) + { + changeFeedEvents.Add(changeFeedEvent); + } + #endregion + } + + /// + /// You can use the change feed cursor to resume iterating throw the change feed + /// at a later time. 
+ /// + [Test] + public async Task ChangeFeedResumeWithCursorAsync() + { + // Get a connection string to our Azure Storage account. + string connectionString = ConnectionString; + + // Get a new blob service client. + BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString); + + // Get a new change feed client. + BlobChangeFeedClient changeFeedClient = blobServiceClient.GetChangeFeedClient(); + List changeFeedEvents = new List(); + + #region Snippet:SampleSnippetsChangeFeed_ResumeWithCursor + IAsyncEnumerator> enumerator = changeFeedClient + .GetChangesAsync() + .AsPages(pageSizeHint: 10) + .GetAsyncEnumerator(); + + await enumerator.MoveNextAsync(); + + foreach (BlobChangeFeedEvent changeFeedEvent in enumerator.Current.Values) + { + changeFeedEvents.Add(changeFeedEvent); + } + + // get the change feed cursor. The cursor is not required to get each page of events, + // it is intended to be saved and used to resume iterating at a later date. + string cursor = enumerator.Current.ContinuationToken; + + // Resume iterating from the previous position with the cursor. + await foreach (BlobChangeFeedEvent changeFeedEvent in changeFeedClient.GetChangesAsync( + continuation: cursor)) + { + changeFeedEvents.Add(changeFeedEvent); + } + #endregion } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample02_Auth.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample02_Auth.cs deleted file mode 100644 index aa7410280d819..0000000000000 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/samples/Sample02_Auth.cs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License.
- -using System; -using System.IO; -using System.Net; -using System.Text; -using System.Threading.Tasks; -using Azure.Core; -using Azure.Identity; -using Azure.Storage; -using Azure.Storage.Sas; -using NUnit.Framework; - -namespace Azure.Storage.Blobs.ChangeFeed.Samples -{ - public class Sample02_Auth : SampleTest - { - [Test] - public async Task SampleSample() - { - await Task.CompletedTask; - Assert.IsTrue(true); - } - } -} \ No newline at end of file From 21bedda868a3fbc23d5b798613e0e3820df2759d Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Mon, 11 May 2020 12:32:03 -0700 Subject: [PATCH 21/30] PR comments --- .../src/BlobChangeFeedClient.cs | 101 +++++++++++++++++- .../src/BlobChangeFeedExtensions.cs | 9 +- 2 files changed, 107 insertions(+), 3 deletions(-) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedClient.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedClient.cs index 43f2ccbcd3569..d469d28534f51 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedClient.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedClient.cs @@ -5,6 +5,7 @@ using System.Collections.Generic; using System.Text; using System.Threading.Tasks; +using Azure.Core; using Azure.Storage.Blobs; using Azure.Storage.Blobs.ChangeFeed.Models; @@ -27,6 +28,104 @@ internal BlobChangeFeedClient(BlobServiceClient blobServiceClient) _blobServiceClient = blobServiceClient; } + /// + /// Initializes a new instance of the + /// class. + /// + /// + /// A connection string includes the authentication information + /// required for your application to access data in an Azure Storage + /// account at runtime. + /// + /// For more information, . + /// + public BlobChangeFeedClient(string connectionString) + { + _blobServiceClient = new BlobServiceClient(connectionString); + } + + /// + /// Initializes a new instance of the + /// class. 
+ /// + /// + /// A connection string includes the authentication information + /// required for your application to access data in an Azure Storage + /// account at runtime. + /// + /// For more information, . + /// + /// + /// Optional client options that define the transport pipeline + /// policies for authentication, retries, etc., that are applied to + /// every request. + /// + public BlobChangeFeedClient(string connectionString, BlobClientOptions options) + + { + _blobServiceClient = new BlobServiceClient(connectionString, options); + } + + /// + /// Initializes a new instance of the + /// class. + /// + /// + /// A referencing the blob service. + /// This is likely to be similar to "https://{account_name}.blob.core.windows.net". + /// + /// + /// Optional client options that define the transport pipeline + /// policies for authentication, retries, etc., that are applied to + /// every request. + /// + public BlobChangeFeedClient(Uri serviceUri, BlobClientOptions options = default) + { + _blobServiceClient = new BlobServiceClient(serviceUri, options); + } + + /// + /// Initializes a new instance of the + /// class. + /// + /// + /// A referencing the blob service. + /// This is likely to be similar to "https://{account_name}.blob.core.windows.net". + /// + /// + /// The shared key credential used to sign requests. + /// + /// + /// Optional client options that define the transport pipeline + /// policies for authentication, retries, etc., that are applied to + /// every request. + /// + public BlobChangeFeedClient(Uri serviceUri, StorageSharedKeyCredential credential, BlobClientOptions options = default) + { + _blobServiceClient = new BlobServiceClient(serviceUri, credential, options); + } + + /// + /// Initializes a new instance of the + /// class. + /// + /// + /// A referencing the blob service. + /// This is likely to be similar to "https://{account_name}.blob.core.windows.net". + /// + /// + /// The token credential used to sign requests. 
+ /// + /// + /// Optional client options that define the transport pipeline + /// policies for authentication, retries, etc., that are applied to + /// every request. + /// + public BlobChangeFeedClient(Uri serviceUri, TokenCredential credential, BlobClientOptions options = default) + { + _blobServiceClient = new BlobServiceClient(serviceUri, credential, options); + } + /// /// GetChanges. /// @@ -117,4 +216,4 @@ public virtual BlobChangeFeedAsyncPagable GetChangesAsync( return asyncPagable; } } -} \ No newline at end of file +} diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs index eeeb545b26545..db2823badacca 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedExtensions.cs @@ -34,8 +34,13 @@ public static BlobChangeFeedClient GetChangeFeedClient(this BlobServiceClient se { return default; } - segmentPath = segmentPath.Trim('/'); - string[] splitPath = segmentPath.Split('/'); + string[] splitPath = segmentPath.Split(new char[] { '/' }, StringSplitOptions.RemoveEmptyEntries); + + if (splitPath.Length < 3) + { + throw new ArgumentException($"{nameof(segmentPath)} is not a valid segment path."); + } + return new DateTimeOffset( year: int.Parse(splitPath[2], CultureInfo.InvariantCulture), month: splitPath.Length >= 4 From 353b928372abcf0c7134cc4e2c7d464a69be073a Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Wed, 27 May 2020 00:52:36 -0500 Subject: [PATCH 22/30] Updated readme to pass CI --- sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md index fb6d333f06b0d..4e81313d00f72 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md +++ 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/README.md @@ -28,6 +28,10 @@ Here's an example using the Azure CLI: az storage account create --name MyStorageAccount --resource-group MyResourceGroup --location westus --sku Standard_LRS ``` +### Authenticate the Client + +Authentication works the same as in [Azure.Storage.Blobs][authenticating_with_blobs]. + ## Key concepts The change feed is stored as blobs in a special container in your storage account at standard blob @@ -133,6 +137,7 @@ additional questions or comments. [storage_account_create_ps]: https://docs.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-powershell [storage_account_create_cli]: https://docs.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-cli [storage_account_create_portal]: https://docs.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-portal +[authenticating_with_blobs]: https://github.com/Azure/azure-sdk-for-net/blob/master/sdk/storage/Azure.Storage.Blobs/samples/Sample02_Auth.cs [RequestFailedException]: https://github.com/Azure/azure-sdk-for-net/tree/master/sdk/core/Azure.Core/src/RequestFailedException.cs [error_codes]: https://docs.microsoft.com/rest/api/storageservices/blob-service-error-codes [samples]: samples/ From 105c4356188a946d3b552f53d40be5baf5b8878c Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Wed, 27 May 2020 11:35:07 -0500 Subject: [PATCH 23/30] Fixed some edge cases and added tests --- .../src/ChangeFeed.cs | 17 + .../src/ChangeFeedFactory.cs | 32 +- .../tests/ChangeFeedTestBase.cs | 2 +- .../tests/ChangeFeedTests.cs | 376 +++++++++++++++++- 4 files changed, 412 insertions(+), 15 deletions(-) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs index 98ea9ef23893a..af459890113b4 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs +++ 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs @@ -55,6 +55,11 @@ internal class ChangeFeed /// private DateTimeOffset? _endTime; + /// + /// If this Change Feed has no events. + /// + private bool _empty; + public ChangeFeed( BlobContainerClient containerClient, SegmentFactory segmentFactory, @@ -73,6 +78,7 @@ public ChangeFeed( _lastConsumable = lastConsumable; _startTime = startTime; _endTime = endTime; + _empty = false; } /// @@ -122,6 +128,11 @@ public async Task> GetPage( public bool HasNext() { + if (_empty) + { + return false; + } + // We have no more segments, years, and the current segment doesn't have next. if (_segments.Count == 0 && _years.Count == 0 && !_currentSegment.HasNext()) { @@ -229,5 +240,11 @@ private async Task AdvanceSegmentIfNecessary(bool async) } } } + + public static ChangeFeed Empty() + => new ChangeFeed + { + _empty = true + }; } } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs index d0e6465925ced..2d8bd1c43f775 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs @@ -38,8 +38,8 @@ public async Task BuildChangeFeed( { BlobContainerClient containerClient = blobServiceClient.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName); DateTimeOffset lastConsumable; - Queue years; - Queue segments; + Queue years = new Queue(); + Queue segments = new Queue(); ChangeFeedCursor cursor = null; // Create cursor
if (years.Count == 0) { - return new ChangeFeed(); + return ChangeFeed.Empty(); } - string firstYearPath = years.Dequeue(); + while (segments.Count == 0 && years.Count > 0) + { + // Get Segments for year + segments = await BlobChangeFeedExtensions.GetSegmentsInYear( + async: async, + containerClient: containerClient, + yearPath: years.Dequeue(), + startTime: startTime, + endTime: BlobChangeFeedExtensions.MinDateTime(lastConsumable, endTime)) + .ConfigureAwait(false); + } - // Get Segments for first year - segments = await BlobChangeFeedExtensions.GetSegmentsInYear( - async: async, - containerClient: containerClient, - yearPath: firstYearPath, - startTime: startTime, - endTime: BlobChangeFeedExtensions.MinDateTime(lastConsumable, endTime)) - .ConfigureAwait(false); + // We were on the last year, and there were no more segments. + if (segments.Count == 0) + { + return ChangeFeed.Empty(); + } Segment currentSegment = await _segmentFactory.BuildSegment( async, diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs index aefdc00ff9894..6c282adecccb1 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs @@ -18,7 +18,7 @@ public class ChangeFeedTestBase : StorageTestBase public ChangeFeedTestBase(bool async) : this(async, null) { } public ChangeFeedTestBase(bool async, RecordedTestMode? 
mode = null) - : base(async, mode) + : base(async, RecordedTestMode.Live) { } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs index 269f093c3385f..f71de5a728df0 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs @@ -462,8 +462,6 @@ public async Task GetPage() segments[1].Setup(r => r.GetCursor()).Returns(segmentCursor); segments[3].Setup(r => r.GetCursor()).Returns(segmentCursor); - - ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(segmentFactory.Object); ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed( IsAsync, @@ -601,6 +599,380 @@ public async Task GetPage() containerClient.Verify(r => r.Uri, Times.Exactly(2)); } + [Test] + public async Task NoYearsAfterStartTime() + { + // Arrange + Mock serviceClient = new Mock(MockBehavior.Strict); + Mock containerClient = new Mock(MockBehavior.Strict); + Mock blobClient = new Mock(MockBehavior.Strict); + Mock segmentFactory = new Mock(MockBehavior.Strict); + Mock segment = new Mock(MockBehavior.Strict); + + Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed"); + + serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny())).Returns(containerClient.Object); + + if (IsAsync) + { + containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200))); + } + else + { + containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200))); + } + + containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); + + using FileStream stream = File.OpenRead( + $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}"); + BlobDownloadInfo blobDownloadInfo = 
BlobsModelFactory.BlobDownloadInfo(content: stream); + Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200)); + + if (IsAsync) + { + blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse); + } + else + { + blobClient.Setup(r => r.Download()).Returns(downloadResponse); + } + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(asyncPageable); + } + else + { + Pageable pageable = + PageResponseEnumerator.CreateEnumerable(GetYearPathFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(pageable); + } + + ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(segmentFactory.Object); + ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed( + IsAsync, + serviceClient.Object, + startTime: new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero)); + + // Act + bool hasNext = changeFeed.HasNext(); + + // Assert + Assert.IsFalse(hasNext); + + serviceClient.Verify(r => r.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName)); + + if (IsAsync) + { + containerClient.Verify(r => r.ExistsAsync(default)); + } + else + { + containerClient.Verify(r => r.Exists(default)); + } + + containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath)); + + if (IsAsync) + { + blobClient.Verify(r => r.DownloadAsync()); + } + else + { + blobClient.Verify(r => r.Download()); + } + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)); + } + } + + [Test] 
+ public async Task NoSegmentsRemainingInStartYear() + { + // Arrange + int eventCount = 2; + int segmentCount = 2; + Mock serviceClient = new Mock(MockBehavior.Strict); + Mock containerClient = new Mock(MockBehavior.Strict); + Mock blobClient = new Mock(MockBehavior.Strict); + Mock segmentFactory = new Mock(MockBehavior.Strict); + Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed"); + + List> segments = new List>(); + for (int i = 0; i < segmentCount; i++) + { + segments.Add(new Mock(MockBehavior.Strict)); + } + + // ChangeFeedFactory.BuildChangeFeed() setups. + serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny())).Returns(containerClient.Object); + containerClient.SetupSequence(r => r.Uri) + .Returns(containerUri) + .Returns(containerUri); + + if (IsAsync) + { + containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200))); + } + else + { + containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200))); + } + + containerClient.Setup(r => r.GetBlobClient(It.IsAny())).Returns(blobClient.Object); + + using FileStream stream = File.OpenRead( + $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}"); + BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); + Response downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200)); + + if (IsAsync) + { + blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse); + } + else + { + blobClient.Setup(r => r.Download()).Returns(downloadResponse); + } + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathShortFuncAsync); + + containerClient.Setup(r => r.GetBlobsByHierarchyAsync( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + 
default)).Returns(asyncPageable); + } + else + { + Pageable pageable = + PageResponseEnumerator.CreateEnumerable(GetYearsPathShortFunc); + + containerClient.Setup(r => r.GetBlobsByHierarchy( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)).Returns(pageable); + } + + if (IsAsync) + { + AsyncPageable asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2019FuncAsync); + AsyncPageable asyncPageable2 = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2020FuncAsync); + + containerClient.SetupSequence(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + It.IsAny(), + default)) + .Returns(asyncPageable) + .Returns(asyncPageable2); + } + else + { + Pageable pageable = + PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2019Func); + + Pageable pageable2 = + PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2020Func); + + containerClient.SetupSequence(r => r.GetBlobsByHierarchy( + default, + default, + default, + It.IsAny(), + default)) + .Returns(pageable) + .Returns(pageable2); + } + + segmentFactory.SetupSequence(r => r.BuildSegment( + It.IsAny(), + It.IsAny(), + It.IsAny(), + default)) + .Returns(Task.FromResult(segments[0].Object)) + .Returns(Task.FromResult(segments[1].Object)); + + List events = new List(); + for (int i = 0; i < eventCount; i++) + { + events.Add(new BlobChangeFeedEvent + { + Id = Guid.NewGuid() + }); + } + + segments[0].SetupSequence(r => r.GetPage( + It.IsAny(), + It.IsAny(), + default)) + .Returns(Task.FromResult(new List + { + events[0] + })); + + segments[1].SetupSequence(r => r.GetPage( + It.IsAny(), + It.IsAny(), + default)) + .Returns(Task.FromResult(new List + { + events[1] + })); + + segments[0].SetupSequence(r => r.HasNext()) + .Returns(false); + segments[1].SetupSequence(r => r.HasNext()) + .Returns(true) + .Returns(false); + + segments[1].Setup(r => r.GetCursor()) + .Returns(new SegmentCursor()); + + ChangeFeedFactory changeFeedFactory = new 
ChangeFeedFactory(segmentFactory.Object); + ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed( + IsAsync, + serviceClient.Object, + startTime: new DateTimeOffset(2019, 6, 1, 0, 0, 0, TimeSpan.Zero)); + + // Act + Page page = await changeFeed.GetPage(IsAsync); + + // Assert + Assert.AreEqual(2, page.Values.Count); + Assert.AreEqual(events[0].Id, page.Values[0].Id); + Assert.AreEqual(events[1].Id, page.Values[1].Id); + + serviceClient.Verify(r => r.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName)); + containerClient.Verify(r => r.Uri); + + if (IsAsync) + { + containerClient.Verify(r => r.ExistsAsync(default)); + } + else + { + containerClient.Verify(r => r.Exists(default)); + } + + containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath)); + + if (IsAsync) + { + blobClient.Verify(r => r.DownloadAsync()); + } + else + { + blobClient.Verify(r => r.Download()); + } + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + "/", + Constants.ChangeFeed.SegmentPrefix, + default)); + } + + if (IsAsync) + { + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + "idx/segments/2019/", + default)); + + containerClient.Verify(r => r.GetBlobsByHierarchyAsync( + default, + default, + default, + "idx/segments/2020/", + default)); + } + else + { + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + "idx/segments/2019/", + default)); + + containerClient.Verify(r => r.GetBlobsByHierarchy( + default, + default, + default, + "idx/segments/2020/", + default)); + } + + // ChangeFeeed.Next() verifies. 
+ segments[0].Verify(r => r.HasNext(), Times.Exactly(1)); + + segments[0].Verify(r => r.GetPage( + IsAsync, + 512, + default)); + + segments[1].Verify(r => r.HasNext(), Times.Exactly(3)); + + segments[1].Verify(r => r.GetPage( + IsAsync, + 511, + default)); + + containerClient.Verify(r => r.Uri, Times.Exactly(1)); + + } + public static Task> GetYearsPathShortFuncAsync(string continuation, int? pageSizeHint) => Task.FromResult(GetYearsPathShortFunc(continuation, pageSizeHint)); From 20d364d147f617d55610652891d8f1f918e4ef48 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Wed, 27 May 2020 11:50:08 -0500 Subject: [PATCH 24/30] fixed ending the change feed --- .../Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs | 6 +++--- .../src/Models/BlobChangeFeedEventPage.cs | 5 +++++ .../tests/ChangeFeedTestBase.cs | 2 +- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs index af459890113b4..3e15151cd4c41 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs @@ -99,12 +99,12 @@ public async Task> GetPage( if (_currentSegment.DateTime > _endTime) { - return new BlobChangeFeedEventPage(); + return BlobChangeFeedEventPage.Empty(); } - if (_currentSegment.DateTime > _lastConsumable) + if (!_currentSegment.Finalized) { - return new BlobChangeFeedEventPage(); + return BlobChangeFeedEventPage.Empty(); } // Get next page diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs index 98bab185c172d..25077943c44ba 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Models/BlobChangeFeedEventPage.cs @@ -21,5 +21,10 @@ public BlobChangeFeedEventPage(List 
events, string continua Values = events; ContinuationToken = continuationToken; } + + public static BlobChangeFeedEventPage Empty() + => new BlobChangeFeedEventPage( + new List(), + null); } } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs index 6c282adecccb1..aefdc00ff9894 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTestBase.cs @@ -18,7 +18,7 @@ public class ChangeFeedTestBase : StorageTestBase public ChangeFeedTestBase(bool async) : this(async, null) { } public ChangeFeedTestBase(bool async, RecordedTestMode? mode = null) - : base(async, RecordedTestMode.Live) + : base(async, mode) { } From 669337b09652cc2454c74edbcdcaf1d0bd2e8443 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Wed, 27 May 2020 12:01:30 -0500 Subject: [PATCH 25/30] PR comments --- .../src/Segment.cs | 2 +- .../tests/ChangeFeedTests.cs | 17 +++++++++++++++++ .../NoSegmentsRemainingInStartYear.json | 4 ++++ .../NoSegmentsRemainingInStartYearAsync.json | 4 ++++ .../ChangeFeedTests/NoYearsAfterStartTime.json | 4 ++++ .../NoYearsAfterStartTimeAsync.json | 4 ++++ 6 files changed, 34 insertions(+), 1 deletion(-) create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYear.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYearAsync.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTime.json create mode 100644 sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTimeAsync.json diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs index 
788d784307403..305a4bb0ef724 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs @@ -15,7 +15,7 @@ internal class Segment /// /// If this Segment is finalized. /// - public bool Finalized { get; private set; } + public virtual bool Finalized { get; private set; } /// /// The time (to the nearest hour) associated with this Segment. diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs index f71de5a728df0..3fe9588b2caba 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs @@ -433,6 +433,12 @@ public async Task GetPage() events[7] })); + for (int i = 0; i < segments.Count; i++) + { + segments[i].Setup(r => r.Finalized) + .Returns(true); + } + long chunkIndex = 1; long blockOffset = 2; long eventIndex = 3; @@ -596,6 +602,11 @@ public async Task GetPage() segments[1].Verify(r => r.GetCursor()); segments[3].Verify(r => r.GetCursor()); + segments[0].Verify(r => r.Finalized, Times.Exactly(1)); + segments[1].Verify(r => r.Finalized, Times.Exactly(1)); + segments[2].Verify(r => r.Finalized, Times.Exactly(0)); + segments[3].Verify(r => r.Finalized, Times.Exactly(0)); + containerClient.Verify(r => r.Uri, Times.Exactly(2)); } @@ -865,6 +876,12 @@ public async Task NoSegmentsRemainingInStartYear() segments[1].Setup(r => r.GetCursor()) .Returns(new SegmentCursor()); + for (int i = 0; i < segments.Count; i++) + { + segments[i].Setup(r => r.Finalized) + .Returns(true); + } + ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(segmentFactory.Object); ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed( IsAsync, diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYear.json 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYear.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYear.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYearAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYearAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoSegmentsRemainingInStartYearAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTime.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTime.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTime.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTimeAsync.json b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTimeAsync.json new file mode 100644 index 0000000000000..20e5015d169f8 --- /dev/null +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SessionRecords/ChangeFeedTests/NoYearsAfterStartTimeAsync.json @@ -0,0 +1,4 @@ +{ + "Entries": [], + "Variables": {} +} \ No newline at end of file From 1a07ef11beaaad94a0cf7b29a877d34050bd81d1 Mon Sep 17 
00:00:00 2001 From: Sean McCullough Date: Wed, 27 May 2020 12:18:18 -0500 Subject: [PATCH 26/30] PR comments --- .../src/Segment.cs | 28 +++++++++++++------ .../src/SegmentFactory.cs | 1 - 2 files changed, 20 insertions(+), 9 deletions(-) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs index 305a4bb0ef724..03766d5982f1a 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs @@ -23,14 +23,14 @@ internal class Segment public DateTimeOffset DateTime { get; private set; } /// - /// Container client for listing Shards. + /// The Shards associated with this Segment. /// - private readonly BlobContainerClient _containerClient; + private readonly List _shards; /// - /// The Shards associated with this Segment. + /// The Shards we have finished reading from. /// - private readonly List _shards; + private readonly HashSet _finishedShards; /// /// The index of the Shard we will return the next event from. @@ -38,17 +38,16 @@ internal class Segment private int _shardIndex; public Segment( - BlobContainerClient containerClient, List shards, int shardIndex, DateTimeOffset dateTime, bool finalized) { - _containerClient = containerClient; _shards = shards; _shardIndex = shardIndex; DateTime = dateTime; Finalized = finalized; + _finishedShards = new HashSet(); } public virtual SegmentCursor GetCursor() @@ -79,6 +78,13 @@ public virtual async Task> GetPage( int i = 0; while (i < pageSize && _shards.Count > 0) { + // If this Shard is finished, skip it. 
+ if (_finishedShards.Contains(_shardIndex)) + { + _shardIndex++; + continue; + } + Shard currentShard = _shards[_shardIndex]; BlobChangeFeedEvent changeFeedEvent = await currentShard.Next(async, cancellationToken).ConfigureAwait(false); @@ -88,7 +94,7 @@ public virtual async Task> GetPage( // If the current shard is completed, remove it from _shards if (!currentShard.HasNext()) { - _shards.RemoveAt(_shardIndex); + _finishedShards.Add(_shardIndex); } i++; @@ -97,13 +103,19 @@ public virtual async Task> GetPage( { _shardIndex = 0; } + + // If all the Shards are finished, we need to break out early. + if (_finishedShards.Count == _shards.Count) + { + break; + } } return changeFeedEventList; } public virtual bool HasNext() - => _shards.Count > 0; + => _finishedShards.Count < _shards.Count; /// /// Constructor for mocking. diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs index c40bbcfb90b95..769c744253a58 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs @@ -83,7 +83,6 @@ public virtual async Task BuildSegment( } return new Segment( - containerClient, shards, shardIndex, dateTime, From c942c1ffe1f436abdb5d5cc8111c06f7295e3751 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Thu, 28 May 2020 13:00:36 -0500 Subject: [PATCH 27/30] Changed default page size --- sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs | 5 +++++ .../Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs | 4 ++-- sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs | 2 +- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs index 3e15151cd4c41..b0f85507efdd1 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs +++ 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs @@ -107,6 +107,11 @@ public async Task> GetPage( return BlobChangeFeedEventPage.Empty(); } + if (pageSize > Constants.ChangeFeed.DefaultPageSize) + { + pageSize = Constants.ChangeFeed.DefaultPageSize; + } + // Get next page List blobChangeFeedEvents = new List(); diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs index 3fe9588b2caba..01cd4927ff57d 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs @@ -976,14 +976,14 @@ public async Task NoSegmentsRemainingInStartYear() segments[0].Verify(r => r.GetPage( IsAsync, - 512, + Constants.ChangeFeed.DefaultPageSize, default)); segments[1].Verify(r => r.HasNext(), Times.Exactly(3)); segments[1].Verify(r => r.GetPage( IsAsync, - 511, + Constants.ChangeFeed.DefaultPageSize - 1, default)); containerClient.Verify(r => r.Uri, Times.Exactly(1)); diff --git a/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs b/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs index 194e20b638d2d..bec56ed09c60c 100644 --- a/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs +++ b/sdk/storage/Azure.Storage.Common/src/Shared/Constants.cs @@ -313,7 +313,7 @@ internal static class ChangeFeed public const string InitalizationSegment = "1601"; public const string MetaSegmentsPath = "meta/segments.json"; public const long ChunkBlockDownloadSize = MB; - public const int DefaultPageSize = 512; + public const int DefaultPageSize = 5000; internal static class Event { From 83dba4d9556eaa4304a6cf7c5814490903c944e1 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Thu, 28 May 2020 16:02:49 -0500 Subject: [PATCH 28/30] PR comments --- .../src/BlobChangeFeedAsyncPagable.cs | 40 +++++++++++-------- .../src/BlobChangeFeedPagable.cs | 39 ++++++++++-------- 2 files changed, 47 
insertions(+), 32 deletions(-) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs index 03091d9c2ad8d..668c9977e5cdc 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs @@ -26,8 +26,6 @@ public class BlobChangeFeedAsyncPagable : AsyncPageable private readonly DateTimeOffset? _endTime; private readonly string _continuation; - private ChangeFeed _changeFeed; - /// /// Internal constructor. /// @@ -52,28 +50,38 @@ internal BlobChangeFeedAsyncPagable( } /// - /// AsPages. + /// Returns s as Pages. /// - /// - /// - /// + /// + /// Throws an . To use contination, call + /// . + /// + /// + /// Page size. + /// + /// + /// . + /// public override async IAsyncEnumerable> AsPages( string continuationToken = null, int? pageSizeHint = null) { - if (_changeFeed == null) + if (continuationToken != null) { - _changeFeed = await _changeFeedFactory.BuildChangeFeed( - async: true, - _blobServiceClient, - _startTime, - _endTime, - _continuation) - .ConfigureAwait(false); + throw new ArgumentException($"Continuation not supported. Use BlobChangeFeedClient.GetChangesAsync(string) instead"); } - while (_changeFeed.HasNext()) + + ChangeFeed changeFeed = await _changeFeedFactory.BuildChangeFeed( + async: true, + _blobServiceClient, + _startTime, + _endTime, + _continuation) + .ConfigureAwait(false); + + while (changeFeed.HasNext()) { - yield return await _changeFeed.GetPage( + yield return await changeFeed.GetPage( async: true, pageSize: pageSizeHint ?? 
512).ConfigureAwait(false); } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs index bdf0291799db8..822ba4c72ba97 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs @@ -22,8 +22,6 @@ public class BlobChangeFeedPagable : Pageable private readonly DateTimeOffset? _endTime; private readonly string _continuation; - private ChangeFeed _changeFeed; - internal BlobChangeFeedPagable( BlobServiceClient blobBerviceClient, DateTimeOffset? startTime = default, @@ -45,27 +43,36 @@ internal BlobChangeFeedPagable( } /// - /// AsPages. + /// Returns s as Pages. /// - /// - /// - /// + /// + /// Throws an . To use contination, call + /// . + /// + /// + /// Page size. + /// + /// + /// . + /// public override IEnumerable> AsPages(string continuationToken = null, int? pageSizeHint = null) { - if (_changeFeed == null) + if (continuationToken != null) { - _changeFeed = _changeFeedFactory.BuildChangeFeed( - async: false, - _blobServiceClient, - _startTime, - _endTime, - _continuation) - .EnsureCompleted(); + throw new ArgumentException($"Continuation not supported. Use BlobChangeFeedClient.GetChanges(string) instead"); } - while (_changeFeed.HasNext()) + ChangeFeed changeFeed = _changeFeedFactory.BuildChangeFeed( + async: false, + _blobServiceClient, + _startTime, + _endTime, + _continuation) + .EnsureCompleted(); + + while (changeFeed.HasNext()) { - yield return _changeFeed.GetPage( + yield return changeFeed.GetPage( async: false, pageSize: pageSizeHint ?? 
512).EnsureCompleted(); } From 04431b3d7368f900a50f7cf0a0a8c0d1f2056339 Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Fri, 29 May 2020 12:59:08 -0500 Subject: [PATCH 29/30] PR comments --- .../src/BlobChangeFeedAsyncPagable.cs | 9 ++--- .../src/BlobChangeFeedPagable.cs | 13 +++---- .../src/ChangeFeed.cs | 19 ++++----- .../src/ChangeFeedFactory.cs | 39 +++++++++++-------- .../src/ChunkFactory.cs | 6 ++- .../src/Segment.cs | 6 +++ .../src/SegmentFactory.cs | 10 +++-- .../src/Shard.cs | 3 +- .../src/ShardFactory.cs | 14 ++++--- .../tests/ChangeFeedFactoryTests.cs | 6 ++- .../tests/ChangeFeedTests.cs | 39 ++++++++----------- .../tests/ChunkTests.cs | 7 ++-- .../tests/SegmentTests.cs | 14 +++---- .../tests/ShardTests.cs | 38 +++++++----------- 14 files changed, 114 insertions(+), 109 deletions(-) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs index 668c9977e5cdc..dce79b08a94be 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedAsyncPagable.cs @@ -30,12 +30,12 @@ public class BlobChangeFeedAsyncPagable : AsyncPageable /// Internal constructor. /// internal BlobChangeFeedAsyncPagable( - BlobServiceClient blobBerviceClient, + BlobServiceClient blobServiceClient, DateTimeOffset? startTime = default, DateTimeOffset? 
endTime = default) { - _changeFeedFactory = new ChangeFeedFactory(); - _blobServiceClient = blobBerviceClient; + _changeFeedFactory = new ChangeFeedFactory(blobServiceClient); + _blobServiceClient = blobServiceClient; _startTime = startTime; _endTime = endTime; } @@ -44,7 +44,7 @@ internal BlobChangeFeedAsyncPagable( BlobServiceClient blobServiceClient, string continuation) { - _changeFeedFactory = new ChangeFeedFactory(); + _changeFeedFactory = new ChangeFeedFactory(blobServiceClient); _blobServiceClient = blobServiceClient; _continuation = continuation; } @@ -73,7 +73,6 @@ public override async IAsyncEnumerable> AsPages( ChangeFeed changeFeed = await _changeFeedFactory.BuildChangeFeed( async: true, - _blobServiceClient, _startTime, _endTime, _continuation) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs index 822ba4c72ba97..cde36551fdfa6 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/BlobChangeFeedPagable.cs @@ -23,22 +23,22 @@ public class BlobChangeFeedPagable : Pageable private readonly string _continuation; internal BlobChangeFeedPagable( - BlobServiceClient blobBerviceClient, + BlobServiceClient blobServiceClient, DateTimeOffset? startTime = default, DateTimeOffset? 
endTime = default) { - _changeFeedFactory = new ChangeFeedFactory(); - _blobServiceClient = blobBerviceClient; + _changeFeedFactory = new ChangeFeedFactory(blobServiceClient); + _blobServiceClient = blobServiceClient; _startTime = startTime; _endTime = endTime; } internal BlobChangeFeedPagable( - BlobServiceClient blobBerviceClient, + BlobServiceClient blobServiceClient, string continuation) { - _changeFeedFactory = new ChangeFeedFactory(); - _blobServiceClient = blobBerviceClient; + _changeFeedFactory = new ChangeFeedFactory(blobServiceClient); + _blobServiceClient = blobServiceClient; _continuation = continuation; } @@ -64,7 +64,6 @@ public override IEnumerable> AsPages(string continuati ChangeFeed changeFeed = _changeFeedFactory.BuildChangeFeed( async: false, - _blobServiceClient, _startTime, _endTime, _continuation) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs index b0f85507efdd1..5c658a6016957 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs @@ -133,20 +133,23 @@ public async Task> GetPage( public bool HasNext() { - if (_empty) + // [If Change Feed is empty], or [current segment is not finalized] + // or ([segment count is 0] and [year count is 0] and [current segment doesn't have next]) + if (_empty + || !_currentSegment.Finalized + || _segments.Count == 0 + && _years.Count == 0 + && !_currentSegment.HasNext()) { return false; } - // We have no more segments, years, and the current segment doesn't have hext. 
- if (_segments.Count == 0 && _years.Count == 0 && !_currentSegment.HasNext()) + if (_endTime.HasValue) { - return false; + return _currentSegment.DateTime <= _endTime; } - DateTimeOffset end = BlobChangeFeedExtensions.MinDateTime(_lastConsumable, _endTime); - - return _currentSegment.DateTime <= end; + return true; } public DateTimeOffset LastConsumable() @@ -218,7 +221,6 @@ private async Task AdvanceSegmentIfNecessary(bool async) { _currentSegment = await _segmentFactory.BuildSegment( async, - _containerClient, _segments.Dequeue()).ConfigureAwait(false); } @@ -239,7 +241,6 @@ private async Task AdvanceSegmentIfNecessary(bool async) { _currentSegment = await _segmentFactory.BuildSegment( async, - _containerClient, _segments.Dequeue()) .ConfigureAwait(false); } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs index 2d8bd1c43f775..1b43a443a3b47 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeedFactory.cs @@ -14,29 +14,36 @@ namespace Azure.Storage.Blobs.ChangeFeed internal class ChangeFeedFactory { private readonly SegmentFactory _segmentFactory; + private readonly BlobContainerClient _containerClient; - public ChangeFeedFactory() + public ChangeFeedFactory( + BlobServiceClient blobServiceClient) { + _containerClient = blobServiceClient.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName); _segmentFactory = new SegmentFactory( + _containerClient, new ShardFactory( + _containerClient, new ChunkFactory( + _containerClient, new LazyLoadingBlobStreamFactory(), new AvroReaderFactory()))); } - public ChangeFeedFactory(SegmentFactory segmentFactory) + public ChangeFeedFactory( + BlobContainerClient containerClient, + SegmentFactory segmentFactory) { + _containerClient = containerClient; _segmentFactory = segmentFactory; } public async Task BuildChangeFeed( bool 
async, - BlobServiceClient blobServiceClient, DateTimeOffset? startTime = default, DateTimeOffset? endTime = default, string continuation = default) { - BlobContainerClient containerClient = blobServiceClient.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName); DateTimeOffset lastConsumable; Queue years = new Queue(); Queue segments = new Queue(); @@ -46,7 +53,7 @@ public async Task BuildChangeFeed( if (continuation != null) { cursor = JsonSerializer.Deserialize(continuation); - ValidateCursor(containerClient, cursor); + ValidateCursor(_containerClient, cursor); startTime = cursor.CurrentSegmentCursor.SegmentTime; endTime = cursor.EndTime; } @@ -62,11 +69,11 @@ public async Task BuildChangeFeed( if (async) { - changeFeedContainerExists = await containerClient.ExistsAsync().ConfigureAwait(false); + changeFeedContainerExists = await _containerClient.ExistsAsync().ConfigureAwait(false); } else { - changeFeedContainerExists = containerClient.Exists(); + changeFeedContainerExists = _containerClient.Exists(); } if (!changeFeedContainerExists) @@ -75,7 +82,7 @@ public async Task BuildChangeFeed( } // Get last consumable - BlobClient blobClient = containerClient.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath); + BlobClient blobClient = _containerClient.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath); BlobDownloadInfo blobDownloadInfo; if (async) { @@ -99,7 +106,7 @@ public async Task BuildChangeFeed( lastConsumable = jsonMetaSegment.RootElement.GetProperty("lastConsumable").GetDateTimeOffset(); // Get year paths - years = await GetYearPaths(async, containerClient).ConfigureAwait(false); + years = await GetYearPaths(async).ConfigureAwait(false); // Dequeue any years that occur before start time if (startTime.HasValue) @@ -122,7 +129,7 @@ public async Task BuildChangeFeed( // Get Segments for year segments = await BlobChangeFeedExtensions.GetSegmentsInYear( async: async, - containerClient: containerClient, + containerClient: _containerClient, 
yearPath: years.Dequeue(), startTime: startTime, endTime: BlobChangeFeedExtensions.MinDateTime(lastConsumable, endTime)) @@ -137,13 +144,12 @@ public async Task BuildChangeFeed( Segment currentSegment = await _segmentFactory.BuildSegment( async, - containerClient, segments.Dequeue(), cursor?.CurrentSegmentCursor) .ConfigureAwait(false); return new ChangeFeed( - containerClient, + _containerClient, _segmentFactory, years, segments, @@ -163,15 +169,14 @@ private static void ValidateCursor( } } - internal static async Task> GetYearPaths( - bool async, - BlobContainerClient containerClient) + internal async Task> GetYearPaths( + bool async) { List list = new List(); if (async) { - await foreach (BlobHierarchyItem blobHierarchyItem in containerClient.GetBlobsByHierarchyAsync( + await foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchyAsync( prefix: Constants.ChangeFeed.SegmentPrefix, delimiter: "/").ConfigureAwait(false)) { @@ -183,7 +188,7 @@ internal static async Task> GetYearPaths( } else { - foreach (BlobHierarchyItem blobHierarchyItem in containerClient.GetBlobsByHierarchy( + foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchy( prefix: Constants.ChangeFeed.SegmentPrefix, delimiter: "/")) { diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs index 6c9aa77e80ad1..b400045f1a171 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChunkFactory.cs @@ -10,22 +10,24 @@ internal class ChunkFactory { private readonly LazyLoadingBlobStreamFactory _lazyLoadingBlobStreamFactory; private readonly AvroReaderFactory _avroReaderFactory; + private readonly BlobContainerClient _containerClient; public ChunkFactory( + BlobContainerClient containerClient, LazyLoadingBlobStreamFactory lazyLoadingBlobStreamFactory, AvroReaderFactory avroReaderFactory) { + 
_containerClient = containerClient; _lazyLoadingBlobStreamFactory = lazyLoadingBlobStreamFactory; _avroReaderFactory = avroReaderFactory; } public virtual Chunk BuildChunk( - BlobContainerClient containerClient, string chunkPath, long? blockOffset = default, long? eventIndex = default) { - BlobClient blobClient = containerClient.GetBlobClient(chunkPath); + BlobClient blobClient = _containerClient.GetBlobClient(chunkPath); blockOffset ??= 0; eventIndex ??= 0; AvroReader avroReader; diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs index 03766d5982f1a..a8fa1955e904e 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Segment.cs @@ -82,6 +82,12 @@ public virtual async Task> GetPage( if (_finishedShards.Contains(_shardIndex)) { _shardIndex++; + + if (_shardIndex == _shards.Count) + { + _shardIndex = 0; + } + continue; } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs index 769c744253a58..11849d500c196 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/SegmentFactory.cs @@ -13,6 +13,7 @@ namespace Azure.Storage.Blobs.ChangeFeed { internal class SegmentFactory { + private readonly BlobContainerClient _containerClient; private readonly ShardFactory _shardFactory; /// @@ -20,8 +21,11 @@ internal class SegmentFactory /// public SegmentFactory() { } - public SegmentFactory(ShardFactory shardFactory) + public SegmentFactory( + BlobContainerClient containerClient, + ShardFactory shardFactory) { + _containerClient = containerClient; _shardFactory = shardFactory; } @@ -29,7 +33,6 @@ public SegmentFactory(ShardFactory shardFactory) public virtual async Task BuildSegment( #pragma warning restore CA1822 // Can't mock static methods in MOQ. 
bool async, - BlobContainerClient containerClient, string manifestPath, SegmentCursor cursor = default) { @@ -39,7 +42,7 @@ public virtual async Task BuildSegment( int shardIndex = cursor?.ShardIndex ?? 0; // Download segment manifest - BlobClient blobClient = containerClient.GetBlobClient(manifestPath); + BlobClient blobClient = _containerClient.GetBlobClient(manifestPath); BlobDownloadInfo blobDownloadInfo; if (async) @@ -73,7 +76,6 @@ public virtual async Task BuildSegment( string shardPath = shardJsonElement.ToString().Substring("$blobchangefeed/".Length); Shard shard = await _shardFactory.BuildShard( async, - containerClient, shardPath, cursor?.ShardCursors?[i]) .ConfigureAwait(false); diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs index ea9a26f3e1fc5..19109ad8a28fe 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/Shard.cs @@ -72,7 +72,6 @@ public virtual async Task Next( if (!_currentChunk.HasNext() && _chunks.Count > 0) { _currentChunk = _chunkFactory.BuildChunk( - _containerClient, _chunks.Dequeue()); _chunkIndex++; } @@ -80,7 +79,7 @@ public virtual async Task Next( } /// - /// Constructor for use by . + /// Constructor for use by . 
/// public Shard( BlobContainerClient containerClient, diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs index 87c0df08f079c..c1185171b1cbe 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ShardFactory.cs @@ -14,9 +14,13 @@ namespace Azure.Storage.Blobs.ChangeFeed internal class ShardFactory { private readonly ChunkFactory _chunkFactory; + private readonly BlobContainerClient _containerClient; - public ShardFactory(ChunkFactory chunkFactory) + public ShardFactory( + BlobContainerClient containerClient, + ChunkFactory chunkFactory) { + _containerClient = containerClient; _chunkFactory = chunkFactory; } @@ -29,7 +33,6 @@ public ShardFactory() { } public virtual async Task BuildShard( #pragma warning restore CA1822 // Can't mock static methods in MOQ. bool async, - BlobContainerClient containerClient, string shardPath, ShardCursor shardCursor = default) { @@ -42,7 +45,7 @@ public virtual async Task BuildShard( // Get Chunks if (async) { - await foreach (BlobHierarchyItem blobHierarchyItem in containerClient.GetBlobsByHierarchyAsync( + await foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchyAsync( prefix: shardPath).ConfigureAwait(false)) { if (blobHierarchyItem.IsPrefix) @@ -54,7 +57,7 @@ public virtual async Task BuildShard( } else { - foreach (BlobHierarchyItem blobHierarchyItem in containerClient.GetBlobsByHierarchy( + foreach (BlobHierarchyItem blobHierarchyItem in _containerClient.GetBlobsByHierarchy( prefix: shardPath)) { if (blobHierarchyItem.IsPrefix) @@ -75,13 +78,12 @@ public virtual async Task BuildShard( } Chunk currentChunk = _chunkFactory.BuildChunk( - containerClient, chunks.Dequeue(), blockOffset, eventIndex); return new Shard( - containerClient, + _containerClient, _chunkFactory, chunks, currentChunk, diff --git 
a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs index 6b1d5fd1417c1..99e4e235a9958 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedFactoryTests.cs @@ -49,8 +49,12 @@ public async Task GetYearPathsTest() default)).Returns(pageable); } + Mock segmentFactory = new Mock(); + ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory( + containerClient.Object, segmentFactory.Object); + // Act - Queue years = await ChangeFeedFactory.GetYearPaths(IsAsync, containerClient.Object).ConfigureAwait(false); + Queue years = await changeFeedFactory.GetYearPaths(IsAsync).ConfigureAwait(false); // Assert Queue expectedYears = new Queue(); diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs index 01cd4927ff57d..b23e38165a749 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChangeFeedTests.cs @@ -119,7 +119,6 @@ public async Task GetCursor() segmentFactory.Setup(r => r.BuildSegment( It.IsAny(), - It.IsAny(), It.IsAny(), It.IsAny())) .ReturnsAsync(segment.Object); @@ -150,12 +149,13 @@ public async Task GetCursor() endDateTime: endDateTime, currentSegmentCursor: segmentCursor); - ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(segmentFactory.Object); + ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory( + containerClient.Object, + segmentFactory.Object); // Act ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed( IsAsync, - serviceClient.Object, continuation: JsonSerializer.Serialize(expectedCursor)); ChangeFeedCursor actualCursor = changeFeed.GetCursor(); @@ -173,7 +173,6 @@ public async Task GetCursor() 
Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors[0].ChunkIndex, actualCursor.CurrentSegmentCursor.ShardCursors[0].ChunkIndex); Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors[0].EventIndex, actualCursor.CurrentSegmentCursor.ShardCursors[0].EventIndex); - serviceClient.Verify(r => r.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName)); containerClient.Verify(r => r.Uri); if (IsAsync) @@ -236,7 +235,6 @@ public async Task GetCursor() segmentFactory.Verify(r => r.BuildSegment( IsAsync, - containerClient.Object, "idx/segments/2020/01/16/2300/meta.json", It.Is( r => r.SegmentTime == segmentTime @@ -362,7 +360,6 @@ public async Task GetPage() segmentFactory.SetupSequence(r => r.BuildSegment( It.IsAny(), - It.IsAny(), It.IsAny(), default)) .Returns(Task.FromResult(segments[0].Object)) @@ -468,10 +465,11 @@ public async Task GetPage() segments[1].Setup(r => r.GetCursor()).Returns(segmentCursor); segments[3].Setup(r => r.GetCursor()).Returns(segmentCursor); - ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(segmentFactory.Object); + ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory( + containerClient.Object, + segmentFactory.Object); ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed( - IsAsync, - serviceClient.Object); + IsAsync); // Act Page page0 = await changeFeed.GetPage(IsAsync, 3); @@ -493,7 +491,6 @@ public async Task GetPage() } // ChangeFeedFactory.BuildChangeFeed() verifies - serviceClient.Verify(r => r.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName)); containerClient.Verify(r => r.Uri); if (IsAsync) @@ -602,10 +599,10 @@ public async Task GetPage() segments[1].Verify(r => r.GetCursor()); segments[3].Verify(r => r.GetCursor()); - segments[0].Verify(r => r.Finalized, Times.Exactly(1)); - segments[1].Verify(r => r.Finalized, Times.Exactly(1)); - segments[2].Verify(r => r.Finalized, Times.Exactly(0)); - segments[3].Verify(r => r.Finalized, Times.Exactly(0)); + 
segments[0].Verify(r => r.Finalized, Times.Exactly(3)); + segments[1].Verify(r => r.Finalized, Times.Exactly(4)); + segments[2].Verify(r => r.Finalized, Times.Exactly(1)); + segments[3].Verify(r => r.Finalized, Times.Exactly(2)); containerClient.Verify(r => r.Uri, Times.Exactly(2)); } @@ -673,10 +670,11 @@ public async Task NoYearsAfterStartTime() default)).Returns(pageable); } - ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(segmentFactory.Object); + ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory( + containerClient.Object, + segmentFactory.Object); ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed( IsAsync, - serviceClient.Object, startTime: new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero)); // Act @@ -685,8 +683,6 @@ public async Task NoYearsAfterStartTime() // Assert Assert.IsFalse(hasNext); - serviceClient.Verify(r => r.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName)); - if (IsAsync) { containerClient.Verify(r => r.ExistsAsync(default)); @@ -834,7 +830,6 @@ public async Task NoSegmentsRemainingInStartYear() segmentFactory.SetupSequence(r => r.BuildSegment( It.IsAny(), - It.IsAny(), It.IsAny(), default)) .Returns(Task.FromResult(segments[0].Object)) @@ -882,10 +877,11 @@ public async Task NoSegmentsRemainingInStartYear() .Returns(true); } - ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(segmentFactory.Object); + ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory( + containerClient.Object, + segmentFactory.Object); ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed( IsAsync, - serviceClient.Object, startTime: new DateTimeOffset(2019, 6, 1, 0, 0, 0, TimeSpan.Zero)); // Act @@ -896,7 +892,6 @@ public async Task NoSegmentsRemainingInStartYear() Assert.AreEqual(events[0].Id, page.Values[0].Id); Assert.AreEqual(events[1].Id, page.Values[1].Id); - serviceClient.Verify(r => r.GetBlobContainerClient(Constants.ChangeFeed.ChangeFeedContainerName)); containerClient.Verify(r 
=> r.Uri); if (IsAsync) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs index b1dccce4bf737..71cff0933289f 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ChunkTests.cs @@ -47,10 +47,10 @@ public void HasNext_True() avroReader.Setup(r => r.HasNext()).Returns(true); ChunkFactory chunkFactory = new ChunkFactory( + containerClient.Object, lazyLoadingBlobStreamFactory.Object, avroReaderFactory.Object); Chunk chunk = chunkFactory.BuildChunk( - containerClient.Object, chunkPath); // Act @@ -94,10 +94,11 @@ public void HasNext_False() avroReader.Setup(r => r.HasNext()).Returns(false); ChunkFactory chunkFactory = new ChunkFactory( + containerClient.Object, lazyLoadingBlobStreamFactory.Object, avroReaderFactory.Object); Chunk chunk = chunkFactory.BuildChunk( - containerClient.Object, + chunkPath); // Act @@ -206,10 +207,10 @@ public async Task Next() avroReader.Setup(r => r.ObjectIndex).Returns(eventIndex); ChunkFactory chunkFactory = new ChunkFactory( + containerClient.Object, lazyLoadingBlobStreamFactory.Object, avroReaderFactory.Object); Chunk chunk = chunkFactory.BuildChunk( - containerClient.Object, chunkPath, blockOffset, eventIndex); diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs index fdfefcd969909..7afe32cdbd6d0 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/SegmentTests.cs @@ -75,7 +75,6 @@ public async Task GetCursor() shardFactory.SetupSequence(r => r.BuildShard( It.IsAny(), - It.IsAny(), It.IsAny(), It.IsAny())) .ReturnsAsync(shards[0].Object) @@ -87,10 +86,11 @@ public async Task GetCursor() shards[i].Setup(r => r.GetCursor()).Returns(shardCursors[i]); } - SegmentFactory segmentFactory = new 
SegmentFactory(shardFactory.Object); + SegmentFactory segmentFactory = new SegmentFactory( + containerClient.Object, + shardFactory.Object); Segment segment = await segmentFactory.BuildSegment( IsAsync, - containerClient.Object, manifestPath, expectedCursor); @@ -123,7 +123,6 @@ public async Task GetCursor() { shardFactory.Verify(r => r.BuildShard( IsAsync, - containerClient.Object, $"log/0{i}/2020/03/25/0200/", shardCursors[i])); } @@ -178,7 +177,6 @@ public async Task GetPage() shardFactory.SetupSequence(r => r.BuildShard( It.IsAny(), - It.IsAny(), It.IsAny(), It.IsAny())) .ReturnsAsync(shards[0].Object) @@ -223,10 +221,11 @@ public async Task GetPage() shards[2].Setup(r => r.HasNext()) .Returns(false); - SegmentFactory segmentFactory = new SegmentFactory(shardFactory.Object); + SegmentFactory segmentFactory = new SegmentFactory( + containerClient.Object, + shardFactory.Object); Segment segment = await segmentFactory.BuildSegment( IsAsync, - containerClient.Object, manifestPath); // Act @@ -253,7 +252,6 @@ public async Task GetPage() { shardFactory.Verify(r => r.BuildShard( IsAsync, - containerClient.Object, $"log/0{i}/2020/03/25/0200/", default)); } diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs index 757fb143fcf54..1a7398f3000ae 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs +++ b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/tests/ShardTests.cs @@ -65,7 +65,6 @@ public async Task GetCursor() } chunkFactory.Setup(r => r.BuildChunk( - It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) @@ -74,12 +73,13 @@ public async Task GetCursor() chunk.Setup(r => r.BlockOffset).Returns(blockOffset); chunk.Setup(r => r.EventIndex).Returns(eventIndex); - ShardFactory shardFactory = new ShardFactory(chunkFactory.Object); + ShardFactory shardFactory = new ShardFactory( + containerClient.Object, + chunkFactory.Object); // Act Shard shard = await 
shardFactory.BuildShard( IsAsync, - containerClient.Object, shardPath, shardCursor) .ConfigureAwait(false); @@ -112,7 +112,6 @@ public async Task GetCursor() } chunkFactory.Verify(r => r.BuildChunk( - containerClient.Object, "chunk2", blockOffset, eventIndex)); @@ -166,7 +165,6 @@ public async Task HasNext_False() } chunkFactory.Setup(r => r.BuildChunk( - It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) @@ -174,12 +172,13 @@ public async Task HasNext_False() chunk.Setup(r => r.HasNext()).Returns(false); - ShardFactory shardFactory = new ShardFactory(chunkFactory.Object); + ShardFactory shardFactory = new ShardFactory( + containerClient.Object, + chunkFactory.Object); // Act Shard shard = await shardFactory.BuildShard( IsAsync, - containerClient.Object, shardPath, shardCursor) .ConfigureAwait(false); @@ -209,7 +208,6 @@ public async Task HasNext_False() } chunkFactory.Verify(r => r.BuildChunk( - containerClient.Object, "chunk5", blockOffset, eventIndex)); @@ -262,18 +260,18 @@ public async Task HasNext_ChunksLeft() } chunkFactory.Setup(r => r.BuildChunk( - It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) .Returns(chunk.Object); - ShardFactory shardFactory = new ShardFactory(chunkFactory.Object); + ShardFactory shardFactory = new ShardFactory( + containerClient.Object, + chunkFactory.Object); // Act Shard shard = await shardFactory.BuildShard( IsAsync, - containerClient.Object, shardPath, shardCursor) .ConfigureAwait(false); @@ -303,7 +301,6 @@ public async Task HasNext_ChunksLeft() } chunkFactory.Verify(r => r.BuildChunk( - containerClient.Object, "chunk2", blockOffset, eventIndex)); @@ -354,7 +351,6 @@ public async Task HasNext_CurrentChunkHasNext() } chunkFactory.Setup(r => r.BuildChunk( - It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) @@ -362,12 +358,13 @@ public async Task HasNext_CurrentChunkHasNext() chunk.Setup(r => r.HasNext()).Returns(true); - ShardFactory shardFactory = new ShardFactory(chunkFactory.Object); + ShardFactory shardFactory = new 
ShardFactory( + containerClient.Object, + chunkFactory.Object); // Act Shard shard = await shardFactory.BuildShard( IsAsync, - containerClient.Object, shardPath, shardCursor) .ConfigureAwait(false); @@ -397,7 +394,6 @@ public async Task HasNext_CurrentChunkHasNext() } chunkFactory.Verify(r => r.BuildChunk( - containerClient.Object, "chunk5", blockOffset, eventIndex)); @@ -464,7 +460,6 @@ public async Task Next() } chunkFactory.SetupSequence(r => r.BuildChunk( - It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) @@ -504,12 +499,13 @@ public async Task Next() chunks[2].Setup(r => r.BlockOffset).Returns(blockOffset); chunks[2].Setup(r => r.EventIndex).Returns(eventIndex); - ShardFactory shardFactory = new ShardFactory(chunkFactory.Object); + ShardFactory shardFactory = new ShardFactory( + containerClient.Object, + chunkFactory.Object); // Act Shard shard = await shardFactory.BuildShard( IsAsync, - containerClient.Object, shardPath, shardCursor) .ConfigureAwait(false); @@ -554,22 +550,18 @@ public async Task Next() } chunkFactory.Verify(r => r.BuildChunk( - containerClient.Object, "chunk2", blockOffset, eventIndex)); chunkFactory.Verify(r => r.BuildChunk( - containerClient.Object, "chunk3", default, default)); chunkFactory.Verify(r => r.BuildChunk( - containerClient.Object, "chunk4", default, default)); chunkFactory.Verify(r => r.BuildChunk( - containerClient.Object, "chunk5", default, default)); From 74e6e051f8d6f07a8e4ca6826f5be9dc72b5098b Mon Sep 17 00:00:00 2001 From: Sean McCullough Date: Mon, 1 Jun 2020 15:47:16 -0500 Subject: [PATCH 30/30] PR comments --- sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs index 5c658a6016957..9cab135ffad16 100644 --- a/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs +++ 
b/sdk/storage/Azure.Storage.Blobs.ChangeFeed/src/ChangeFeed.cs @@ -97,7 +97,7 @@ public async Task> GetPage( throw new InvalidOperationException("Change feed doesn't have any more events"); } - if (_currentSegment.DateTime > _endTime) + if (_currentSegment.DateTime >= _endTime) { return BlobChangeFeedEventPage.Empty(); } @@ -146,7 +146,7 @@ public bool HasNext() if (_endTime.HasValue) { - return _currentSegment.DateTime <= _endTime; + return _currentSegment.DateTime < _endTime; } return true;