From 02a2bfc3b67de85f52ecf7db07f866524428a420 Mon Sep 17 00:00:00 2001
From: Jacob Lauzon <96087589+jalauzon-msft@users.noreply.github.com>
Date: Wed, 1 Nov 2023 17:51:30 -0700
Subject: [PATCH] [Storage][DataMovement] Refactor Job Part Plan file schema
(#39606)
---
.../src/JobPartInternal.cs | 15 +-
.../src/JobPlan/FolderPropertiesMode.cs | 16 -
.../JobPlan/JobPartDeleteSnapshotsOption.cs | 28 -
.../JobPlan/JobPartPermanentDeleteOption.cs | 19 -
.../src/JobPlan/JobPartPlanBlockBlobTier.cs | 19 -
.../src/JobPlan/JobPartPlanDestinationBlob.cs | 284 ------
.../JobPlan/JobPartPlanDestinationLocal.cs | 28 -
.../src/JobPlan/JobPartPlanFileName.cs | 59 +-
.../src/JobPlan/JobPartPlanHeader.cs | 960 ++++--------------
.../src/JobPlan/JobPartPlanPageBlobTier.cs | 33 -
.../JobPartPlanRehydratePriorityType.cs | 12 -
.../src/JobPlan/JobPlanBlobType.cs | 25 -
.../src/JobPlan/JobPlanFile.cs | 2 +-
.../src/JobPlan/JobPlanHeader.cs | 30 +-
.../src/JobPlanExtensions.cs | 47 -
.../src/LocalTransferCheckpointer.cs | 49 +-
.../src/ServiceToServiceJobPart.cs | 4 +-
.../src/Shared/CheckpointerExtensions.cs | 19 +
.../src/Shared/DataMovementConstants.cs | 181 +---
.../src/Shared/DataMovementExtensions.cs | 76 +-
.../src/Shared/Errors.DataMovement.cs | 13 +-
.../src/StreamToUriJobPart.cs | 4 +-
.../src/TransferCheckpointer.cs | 2 -
.../src/UriToStreamJobPart.cs | 4 +-
.../tests/CheckpointerTesting.cs | 200 +---
.../tests/JobPartPlanFileNameTests.cs | 121 +--
.../tests/JobPartPlanHeaderTests.cs | 515 +---------
.../tests/JobPlanFileTests.cs | 4 +-
.../tests/LocalTransferCheckpointerFactory.cs | 180 +---
.../tests/LocalTransferCheckpointerTests.cs | 253 +----
.../SampleJobPartPlanFile.b3.ndmpart | Bin 0 -> 151 bytes
31 files changed, 453 insertions(+), 2749 deletions(-)
delete mode 100644 sdk/storage/Azure.Storage.DataMovement/src/JobPlan/FolderPropertiesMode.cs
delete mode 100644 sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartDeleteSnapshotsOption.cs
delete mode 100644 sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPermanentDeleteOption.cs
delete mode 100644 sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanBlockBlobTier.cs
delete mode 100644 sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanDestinationBlob.cs
delete mode 100644 sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanDestinationLocal.cs
delete mode 100644 sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanPageBlobTier.cs
delete mode 100644 sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanRehydratePriorityType.cs
delete mode 100644 sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanBlobType.cs
create mode 100644 sdk/storage/Azure.Storage.DataMovement/tests/Resources/SampleJobPartPlanFile.b3.ndmpart
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPartInternal.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPartInternal.cs
index ce1bea76bdd0c..ea38713c0b76f 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPartInternal.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/JobPartInternal.cs
@@ -451,20 +451,17 @@ public async virtual Task CleanupAbortedJobPartAsync()
///
/// Serializes the respective job part and adds it to the checkpointer.
///
- /// Number of chunks in the job part.
- ///
- public async virtual Task AddJobPartToCheckpointerAsync(int chunksTotal)
+ public async virtual Task AddJobPartToCheckpointerAsync()
{
- JobPartPlanHeader header = this.ToJobPartPlanHeader(jobStatus: JobPartStatus);
+ JobPartPlanHeader header = this.ToJobPartPlanHeader();
using (Stream stream = new MemoryStream())
{
header.Serialize(stream);
await _checkpointer.AddNewJobPartAsync(
- transferId: _dataTransfer.Id,
- partNumber: PartNumber,
- chunksTotal: chunksTotal,
- headerStream: stream,
- cancellationToken: _cancellationToken).ConfigureAwait(false);
+ transferId: _dataTransfer.Id,
+ partNumber: PartNumber,
+ headerStream: stream,
+ cancellationToken: _cancellationToken).ConfigureAwait(false);
}
}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/FolderPropertiesMode.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/FolderPropertiesMode.cs
deleted file mode 100644
index fa77942a2b0c5..0000000000000
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/FolderPropertiesMode.cs
+++ /dev/null
@@ -1,16 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-namespace Azure.Storage.DataMovement.JobPlan
-{
- ///
- /// SMB Feature whether to preserve permissions on the folder
- ///
- internal enum FolderPropertiesMode
- {
- None = 0,
- NoFolders = 1,
- AllFoldersExceptRoot = 2,
- AllFolders = 3,
- }
-}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartDeleteSnapshotsOption.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartDeleteSnapshotsOption.cs
deleted file mode 100644
index 6564189d9f9e7..0000000000000
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartDeleteSnapshotsOption.cs
+++ /dev/null
@@ -1,28 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-namespace Azure.Storage.DataMovement.JobPlan
-{
- ///
- /// Required if the blob has associated snapshots. Specify one of the following two options:
- /// include: Delete the base blob and all of its snapshots.
- /// only: Delete only the blob's snapshots and not the blob itself
- ///
- internal enum JobPartDeleteSnapshotsOption
- {
- ///
- /// none
- ///
- None = 0,
-
- ///
- /// include
- ///
- IncludeSnapshots = 1,
-
- ///
- /// only
- ///
- OnlySnapshots = 2,
- }
-}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPermanentDeleteOption.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPermanentDeleteOption.cs
deleted file mode 100644
index 9959910a9aa8c..0000000000000
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPermanentDeleteOption.cs
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-namespace Azure.Storage.DataMovement.JobPlan
-{
- ///
- /// Permanent Delete Options
- ///
- /// TODO: Consider removing since the SDK does not support deleting
- /// customer data permanently
- ///
- internal enum JobPartPermanentDeleteOption
- {
- None = 0,
- Snapshots = 1,
- Versions = 2,
- SnapshotsAndVersions = 3,
- }
-}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanBlockBlobTier.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanBlockBlobTier.cs
deleted file mode 100644
index 358cad4c6675e..0000000000000
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanBlockBlobTier.cs
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-namespace Azure.Storage.DataMovement.JobPlan
-{
- internal enum JobPartPlanBlockBlobTier
- {
- /// None.
- None = 0,
- /// Hot.
- Hot = 1,
- /// Cool.
- Cool = 2,
- /// Archive.
- Archive = 3,
- /// Cold.
- Cold = 4,
- }
-}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanDestinationBlob.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanDestinationBlob.cs
deleted file mode 100644
index 98567c159e473..0000000000000
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanDestinationBlob.cs
+++ /dev/null
@@ -1,284 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-using System.Collections.Generic;
-
-namespace Azure.Storage.DataMovement.JobPlan
-{
- ///
- /// Describes the structure of a destination blob.
- ///
- /// Comes to a total of 6311 bytes
- ///
- internal class JobPartPlanDestinationBlob
- {
- ///
- /// Blob Type
- ///
- public JobPlanBlobType BlobType;
-
- ///
- /// Represents user decision to interpret the content-encoding from source file
- ///
- public bool NoGuessMimeType;
-
- ///
- /// Specifies the length of MIME content type of the blob
- ///
- public ushort ContentTypeLength;
-
- ///
- /// Specifies the MIME content type of the blob. The default type is application/octet-stream
- ///
- public string ContentType;
-
- ///
- /// Specifies length of content encoding which have been applied to the blob.
- ///
- public ushort ContentEncodingLength;
-
- ///
- /// Specifies the MIME content type of the blob. The default type is application/octet-stream
- ///
- public string ContentEncoding;
-
- ///
- /// Specifies length of content language which has been applied to the blob.
- ///
- public ushort ContentLanguageLength;
-
- ///
- /// Specifies which content language has been applied to the blob.
- ///
- public string ContentLanguage;
-
- ///
- /// Specifies length of content disposition which has been applied to the blob.
- ///
- public ushort ContentDispositionLength;
-
- ///
- /// Specifies the content disposition of the blob
- ///
- public string ContentDisposition;
-
- ///
- /// Specifies the length of the cache control which has been applied to the blob.
- ///
- public ushort CacheControlLength;
-
- ///
- /// Specifies the cache control of the blob
- ///
- public string CacheControl;
-
- ///
- /// Specifies the tier if this is a block or page blob respectfully. Only one or none can be specified at a time.
- ///
- public JobPartPlanBlockBlobTier BlockBlobTier;
- public JobPartPlanPageBlobTier PageBlobTier;
-
- ///
- /// Controls uploading of MD5 hashes
- ///
- public bool PutMd5;
-
- ///
- /// Length of metadata
- ///
- public ushort MetadataLength;
-
- ///
- /// Metadata
- ///
- public string Metadata;
-
- ///
- /// Length of blob tags
- ///
- public long BlobTagsLength;
-
- ///
- /// Blob Tags
- ///
- public string BlobTags;
-
- ///
- /// Is source encrypted?
- ///
- public bool IsSourceEncrypted;
-
- ///
- /// CPK encryption scope.
- ///
- public ushort CpkScopeInfoLength;
-
- ///
- /// Length of CPK encryption scope.
- ///
- public string CpkScopeInfo;
-
- ///
- /// Specifies the maximum size of block which determines the number of chunks and chunk size of a transfer
- ///
- public long BlockSize;
-
- public JobPartPlanDestinationBlob(
- JobPlanBlobType blobType,
- bool noGuessMimeType,
- string contentType,
- string contentEncoding,
- string contentLanguage,
- string contentDisposition,
- string cacheControl,
- JobPartPlanBlockBlobTier blockBlobTier,
- JobPartPlanPageBlobTier pageBlobTier,
- bool putMd5,
- string metadata,
- string blobTags,
- bool isSourceEncrypted,
- string cpkScopeInfo,
- long blockSize)
- : this(
- blobType: blobType,
- noGuessMimeType: noGuessMimeType,
- contentType: contentType,
- contentEncoding: contentEncoding,
- contentLanguage: contentLanguage,
- contentDisposition: contentDisposition,
- cacheControl: cacheControl,
- blockBlobTier: blockBlobTier,
- pageBlobTier: pageBlobTier,
- putMd5: putMd5,
- metadata: metadata.ToDictionary(nameof(metadata)),
- blobTags: blobTags.ToDictionary(nameof(blobTags)),
- isSourceEncrypted: isSourceEncrypted,
- cpkScopeInfo: cpkScopeInfo,
- blockSize: blockSize)
- {
- }
-
- public JobPartPlanDestinationBlob(
- JobPlanBlobType blobType,
- bool noGuessMimeType,
- string contentType,
- string contentEncoding,
- string contentLanguage,
- string contentDisposition,
- string cacheControl,
- JobPartPlanBlockBlobTier blockBlobTier,
- JobPartPlanPageBlobTier pageBlobTier,
- bool putMd5,
- IDictionary metadata,
- IDictionary blobTags,
- bool isSourceEncrypted,
- string cpkScopeInfo,
- long blockSize)
- {
- BlobType = blobType;
- NoGuessMimeType = noGuessMimeType;
- if (contentType.Length <= DataMovementConstants.JobPartPlanFile.HeaderValueMaxLength)
- {
- ContentType = contentType;
- ContentTypeLength = (ushort) contentType.Length;
- }
- else
- {
- throw Errors.InvalidPlanFileElement(
- elementName: nameof(ContentType),
- expectedSize: DataMovementConstants.JobPartPlanFile.HeaderValueMaxLength,
- actualSize: contentType.Length);
- }
- if (contentEncoding.Length <= DataMovementConstants.JobPartPlanFile.HeaderValueMaxLength)
- {
- ContentEncoding = contentEncoding;
- ContentEncodingLength = (ushort) contentEncoding.Length;
- }
- else
- {
- throw Errors.InvalidPlanFileElement(
- elementName: nameof(ContentEncoding),
- expectedSize: DataMovementConstants.JobPartPlanFile.HeaderValueMaxLength,
- actualSize: contentEncoding.Length);
- }
- if (contentLanguage.Length <= DataMovementConstants.JobPartPlanFile.HeaderValueMaxLength)
- {
- ContentLanguage = contentLanguage;
- ContentLanguageLength = (ushort) contentLanguage.Length;
- }
- else
- {
- throw Errors.InvalidPlanFileElement(
- elementName: nameof(ContentLanguage),
- expectedSize: DataMovementConstants.JobPartPlanFile.HeaderValueMaxLength,
- actualSize: contentLanguage.Length);
- }
- if (contentDisposition.Length <= DataMovementConstants.JobPartPlanFile.HeaderValueMaxLength)
- {
- ContentDisposition = contentDisposition;
- ContentDispositionLength = (ushort) contentDisposition.Length;
- }
- else
- {
- throw Errors.InvalidPlanFileElement(
- elementName: nameof(ContentDisposition),
- expectedSize: DataMovementConstants.JobPartPlanFile.HeaderValueMaxLength,
- actualSize: contentDisposition.Length);
- }
- if (cacheControl.Length <= DataMovementConstants.JobPartPlanFile.HeaderValueMaxLength)
- {
- CacheControl = cacheControl;
- CacheControlLength = (ushort) cacheControl.Length;
- }
- else
- {
- throw Errors.InvalidPlanFileElement(
- elementName: nameof(CacheControl),
- expectedSize: DataMovementConstants.JobPartPlanFile.HeaderValueMaxLength,
- actualSize: cacheControl.Length);
- }
- BlockBlobTier = blockBlobTier;
- PageBlobTier = pageBlobTier;
- PutMd5 = putMd5;
- string metadataConvert = metadata.DictionaryToString();
- if (metadataConvert.Length <= DataMovementConstants.JobPartPlanFile.MetadataStrMaxLength)
- {
- Metadata = metadataConvert;
- MetadataLength = (ushort) metadataConvert.Length;
- }
- else
- {
- throw Errors.InvalidPlanFileElement(
- elementName: nameof(Metadata),
- expectedSize: DataMovementConstants.JobPartPlanFile.MetadataStrMaxLength,
- actualSize: metadataConvert.Length);
- }
- string blobTagsConvert = blobTags.DictionaryToString();
- if (blobTagsConvert.Length <= DataMovementConstants.JobPartPlanFile.BlobTagsStrMaxLength)
- {
- BlobTags = blobTagsConvert;
- BlobTagsLength = blobTagsConvert.Length;
- }
- else
- {
- throw Errors.InvalidPlanFileElement(
- elementName: nameof(blobTags),
- expectedSize: DataMovementConstants.JobPartPlanFile.BlobTagsStrMaxLength,
- actualSize: blobTagsConvert.Length);
- }
- IsSourceEncrypted = isSourceEncrypted;
- if (cpkScopeInfo.Length <= DataMovementConstants.JobPartPlanFile.HeaderValueMaxLength)
- {
- CpkScopeInfo = cpkScopeInfo;
- CpkScopeInfoLength = (ushort) cpkScopeInfo.Length;
- }
- else
- {
- throw Errors.InvalidPlanFileElement(
- elementName: nameof(CpkScopeInfo),
- expectedSize: DataMovementConstants.JobPartPlanFile.HeaderValueMaxLength,
- actualSize: cpkScopeInfo.Length);
- }
- BlockSize = blockSize;
- }
- }
-}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanDestinationLocal.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanDestinationLocal.cs
deleted file mode 100644
index e6f69bd3b7d94..0000000000000
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanDestinationLocal.cs
+++ /dev/null
@@ -1,28 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-namespace Azure.Storage.DataMovement.JobPlan
-{
- ///
- /// This matching the JobPartPlanDstLocal of azcopy
- ///
- internal class JobPartPlanDestinationLocal
- {
- // Once set, the following fields are constants; they should never be modified
-
- // Specifies whether the timestamp of destination file has to be set to the modified time of source file
- public bool PreserveLastModifiedTime;
-
- // Says how checksum verification failures should be actioned
- // TODO: Probably use an Enum once feature is implemented
- public byte ChecksumVerificationOption;
-
- public JobPartPlanDestinationLocal(
- bool preserveLastModifiedTime,
- byte checksumVerificationOption)
- {
- PreserveLastModifiedTime = preserveLastModifiedTime;
- ChecksumVerificationOption = checksumVerificationOption;
- }
- }
-}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanFileName.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanFileName.cs
index adc057dc1681b..b0f6944afa378 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanFileName.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanFileName.cs
@@ -12,18 +12,17 @@ namespace Azure.Storage.DataMovement.JobPlan
/// Saved Job Part Plan File
///
/// Format of the job part plan file name
- /// {transferid}--{jobpartNumber}.steV{schemaVersion}
+ /// {transferid}.{jobpartNumber}.ndmpart
/// e.g. will look like
- /// 204b6e20-e642-fb40-4597-4a35ff5e199f--00001.steV17
+ /// 204b6e20-e642-fb40-4597-4a35ff5e199f.00001.ndmpart
/// where the following information would be
/// transfer id: 204b6e20-e642-fb40-4597-4a35ff5e199f
/// job part number: 00001
- /// version schema: 17
///
internal class JobPartPlanFileName
{
///
- /// Prefix path
+ /// Prefix path.
///
public string PrefixPath { get; }
@@ -40,12 +39,6 @@ internal class JobPartPlanFileName
///
public int JobPartNumber { get; }
- ///
- /// Schema version of the job part plan file. As the schema can change we have
- /// to keep track the version number.
- ///
- public string SchemaVersion { get; }
-
///
/// Full path of the file.
///
@@ -59,14 +52,12 @@ protected JobPartPlanFileName()
/// Creates Job Part Plan File Name
///
/// Path to where all checkpointer files are stored.
- ///
- ///
- ///
+ /// The transfer id.
+ /// The job part number.
public JobPartPlanFileName(
string checkpointerPath,
string id,
- int jobPartNumber,
- string schemaVersion = DataMovementConstants.JobPartPlanFile.SchemaVersion)
+ int jobPartNumber)
{
Argument.AssertNotNullOrEmpty(checkpointerPath, nameof(checkpointerPath));
Argument.AssertNotNullOrEmpty(id, nameof(id));
@@ -74,9 +65,8 @@ public JobPartPlanFileName(
PrefixPath = checkpointerPath;
Id = id;
JobPartNumber = jobPartNumber;
- SchemaVersion = schemaVersion;
- string fileName = $"{Id}--{JobPartNumber.ToString("D5", NumberFormatInfo.CurrentInfo)}{DataMovementConstants.JobPartPlanFile.FileExtension}{SchemaVersion}";
+ string fileName = $"{Id}.{JobPartNumber.ToString("D5", NumberFormatInfo.CurrentInfo)}{DataMovementConstants.JobPartPlanFile.FileExtension}";
FullPath = Path.Combine(PrefixPath, fileName);
}
@@ -86,49 +76,40 @@ public JobPartPlanFileName(string fullPath)
Argument.CheckNotNullOrEmpty(fullPath, nameof(fullPath));
PrefixPath = Path.GetDirectoryName(fullPath);
- if (!Path.HasExtension(fullPath))
+ if (!Path.HasExtension(fullPath) ||
+ !Path.GetExtension(fullPath).Equals(DataMovementConstants.JobPartPlanFile.FileExtension))
{
throw Errors.InvalidJobPartFileNameExtension(fullPath);
}
string fileName = Path.GetFileNameWithoutExtension(fullPath);
- string extension = Path.GetExtension(fullPath);
// Format of the job plan file name
- // {transferid}--{jobpartNumber}.steV{schemaVersion}
+ // {transferid}.{jobpartNumber}.ndmpart
+
+ string[] fileNameSplit = fileName.Split('.');
+ if (fileNameSplit.Length != 2)
+ {
+ throw Errors.InvalidJobPartFileName(fullPath);
+ }
// Check for valid Transfer Id
- int endTransferIdIndex = fileName.IndexOf(DataMovementConstants.JobPartPlanFile.JobPlanFileNameDelimiter, StringComparison.InvariantCultureIgnoreCase);
- if (endTransferIdIndex != DataMovementConstants.JobPartPlanFile.IdSize)
+ if (fileNameSplit[0].Length != DataMovementConstants.JobPartPlanFile.IdSize)
{
throw Errors.InvalidTransferIdFileName(fullPath);
}
- Id = fileName.Substring(0, endTransferIdIndex);
+ Id = fileNameSplit[0];
// Check for valid transfer part number
- int partStartIndex = endTransferIdIndex + DataMovementConstants.JobPartPlanFile.JobPlanFileNameDelimiter.Length;
- int endPartIndex = fileName.Length;
-
- if (endPartIndex - partStartIndex != DataMovementConstants.JobPartPlanFile.JobPartLength)
+ if (fileNameSplit[1].Length != DataMovementConstants.JobPartPlanFile.JobPartLength)
{
throw Errors.InvalidJobPartNumberFileName(fullPath);
}
- if (!int.TryParse(
- fileName.Substring(partStartIndex, DataMovementConstants.JobPartPlanFile.JobPartLength),
- NumberStyles.Number,
- CultureInfo.InvariantCulture,
- out int jobPartNumber))
+ if (!int.TryParse(fileNameSplit[1], NumberStyles.Number, CultureInfo.InvariantCulture, out int jobPartNumber))
{
throw Errors.InvalidJobPartNumberFileName(fullPath);
}
JobPartNumber = jobPartNumber;
- string fullExtension = string.Concat(DataMovementConstants.JobPartPlanFile.FileExtension, DataMovementConstants.JobPartPlanFile.SchemaVersion);
- if (!fullExtension.Equals(extension))
- {
- throw Errors.InvalidSchemaVersionFileName(extension);
- }
- SchemaVersion = DataMovementConstants.JobPartPlanFile.SchemaVersion;
-
FullPath = fullPath;
}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanHeader.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanHeader.cs
index 20b7e7b7a8377..85b7db205362e 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanHeader.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanHeader.cs
@@ -3,931 +3,311 @@
using System;
using System.IO;
+using System.Text;
using Azure.Core;
namespace Azure.Storage.DataMovement.JobPlan
{
- ///
- /// Stores the Job Part Header information to resume from.
- ///
internal class JobPartPlanHeader
{
///
- /// The version of data schema format of header
- /// This will seem weird because we will have a schema for how we store the data
- /// when the schema changes this version will increment
- ///
- /// TODO: Consider changing to an int when GA comes.
- /// TODO: In public preview we should
- /// leave the version as "b1", instead of complete ints.
+ /// The schema version.
///
public string Version;
///
- /// The start time of the job part.
- ///
- public DateTimeOffset StartTime;
-
- ///
- /// The Transfer/Job Id
- ///
- /// Size of a GUID.
+ /// The Transfer/Job Id.
///
public string TransferId;
///
- /// Job Part's part number (0+)
+ /// Job Part's part number (0+).
///
public long PartNumber;
///
- /// The length of the source resource identifier
+ /// The creation time of the job part.
///
- public ushort SourceResourceIdLength;
+ public DateTimeOffset CreateTime;
///
- /// The identifier of the source resource
+ /// A string identifier for the source resource.
///
- public string SourceResourceId;
+ public string SourceTypeId;
///
- /// The length of the source root path
+ /// A string identifier for the destination resource.
///
- public ushort SourcePathLength;
+ public string DestinationTypeId;
///
- /// The source path
+ /// The source path.
///
public string SourcePath;
///
- /// The length of the source path query
- ///
- public ushort SourceExtraQueryLength;
-
- ///
- /// Extra query params applicable to the source
- ///
- public string SourceExtraQuery;
-
- ///
- /// The length of the destination resource identifier
- ///
- public ushort DestinationResourceIdLength;
-
- ///
- /// The identifier of the destination resource
- ///
- public string DestinationResourceId;
-
- ///
- /// The length of the destination root path
- ///
- public ushort DestinationPathLength;
-
- ///
- /// The destination path
+ /// The destination path.
///
public string DestinationPath;
///
- /// The length of the destination path query
- ///
- public ushort DestinationExtraQueryLength;
-
- ///
- /// Extra query params applicable to the dest
+ /// Whether the destination should be overriden or not.
///
- public string DestinationExtraQuery;
+ public bool Overwrite;
///
- /// True if this is the Job's last part; else false
+ /// Ths intial transfer size for the transfer.
///
- public bool IsFinalPart;
+ public long InitialTransferSize;
///
- /// True if the existing blobs needs to be overwritten.
+ /// The chunk size to use for the transfer.
///
- public bool ForceWrite;
+ public long ChunkSize;
///
- /// Supplements ForceWrite with an additional setting for Azure Files. If true, the read-only attribute will be cleared before we overwrite
- ///
- public bool ForceIfReadOnly;
-
- ///
- /// if true, source data with encodings that represent compression are automatically decompressed when downloading
- ///
- public bool AutoDecompress;
-
- ///
- /// The Job Part's priority
+ /// The job part priority (future use).
///
public byte Priority;
///
- /// Time to live after completion is used to persists the file on disk of specified time after the completion of JobPartOrder
- ///
- public DateTimeOffset TTLAfterCompletion;
-
- ///
- /// The location of the transfer's source and destination
- ///
- public JobPlanOperation JobPlanOperation;
-
- ///
- /// option specifying how folders will be handled
- ///
- public FolderPropertiesMode FolderPropertyMode;
-
- ///
- /// The number of transfers in the Job part
- ///
- public long NumberChunks;
-
- ///
- /// Additional data for blob destinations
- /// Holds the additional information about the blob
- ///
- public JobPartPlanDestinationBlob DstBlobData;
-
- ///
- /// Additional data for local destinations
- ///
- public JobPartPlanDestinationLocal DstLocalData;
-
- ///
- /// If applicable the SMB information
- ///
- public bool PreserveSMBPermissions;
-
- ///
- /// Whether to preserve SMB info
- ///
- public bool PreserveSMBInfo;
-
- ///
- /// S2SGetPropertiesInBackend represents whether to enable get S3 objects' or Azure files' properties during s2s copy in backend.
- ///
- public bool S2SGetPropertiesInBackend;
-
- ///
- /// S2SSourceChangeValidation represents whether user wants to check if source has changed after enumerating.
- ///
- public bool S2SSourceChangeValidation;
-
- ///
- /// DestLengthValidation represents whether the user wants to check if the destination has a different content-length
- ///
- public bool DestLengthValidation;
-
- ///
- /// S2SInvalidMetadataHandleOption represents how user wants to handle invalid metadata.
- ///
- /// TODO: update to a struc tto handle the S2S Invalid metadata handle option
- ///
- public byte S2SInvalidMetadataHandleOption;
-
- ///
- /// For delete operation specify what to do with snapshots
+ /// The current status of the job part.
///
- public JobPartDeleteSnapshotsOption DeleteSnapshotsOption;
+ public DataTransferStatus JobPartStatus;
- ///
- /// Permanent Delete Option
- ///
- public JobPartPermanentDeleteOption PermanentDeleteOption;
-
- ///
- /// Rehydrate Priority type
- ///
- public JobPartPlanRehydratePriorityType RehydratePriorityType;
-
- // Any fields below this comment are NOT constants; they may change over as the job part is processed.
- // Care must be taken to read/write to these fields in a thread-safe way!
-
- // jobStatus_doNotUse represents the current status of JobPartPlan
- // jobStatus_doNotUse is a private member whose value can be accessed by Status and SetJobStatus
- // jobStatus_doNotUse should not be directly accessed anywhere except by the Status and SetJobStatus
- public DataTransferStatus AtomicJobStatus;
-
- public DataTransferStatus AtomicPartStatus;
-
- internal JobPartPlanHeader(
+ public JobPartPlanHeader(
string version,
- DateTimeOffset startTime,
string transferId,
long partNumber,
- string sourceResourceId,
+ DateTimeOffset createTime,
+ string sourceTypeId,
+ string destinationTypeId,
string sourcePath,
- string sourceExtraQuery,
- string destinationResourceId,
string destinationPath,
- string destinationExtraQuery,
- bool isFinalPart,
- bool forceWrite,
- bool forceIfReadOnly,
- bool autoDecompress,
+ bool overwrite,
+ long initialTransferSize,
+ long chunkSize,
byte priority,
- DateTimeOffset ttlAfterCompletion,
- JobPlanOperation jobPlanOperation,
- FolderPropertiesMode folderPropertyMode,
- long numberChunks,
- JobPartPlanDestinationBlob dstBlobData,
- JobPartPlanDestinationLocal dstLocalData,
- bool preserveSMBPermissions,
- bool preserveSMBInfo,
- bool s2sGetPropertiesInBackend,
- bool s2sSourceChangeValidation,
- bool destLengthValidation,
- byte s2sInvalidMetadataHandleOption,
- JobPartDeleteSnapshotsOption deleteSnapshotsOption,
- JobPartPermanentDeleteOption permanentDeleteOption,
- JobPartPlanRehydratePriorityType rehydratePriorityType,
- DataTransferStatus atomicJobStatus,
- DataTransferStatus atomicPartStatus)
+ DataTransferStatus jobPartStatus)
{
- // Version String size verification
Argument.AssertNotNullOrEmpty(version, nameof(version));
- Argument.AssertNotNull(startTime, nameof(startTime));
Argument.AssertNotNullOrEmpty(transferId, nameof(transferId));
+ Argument.AssertNotNull(createTime, nameof(createTime));
+ Argument.AssertNotNullOrEmpty(sourceTypeId, nameof(sourceTypeId));
+ Argument.AssertNotNullOrWhiteSpace(destinationTypeId, nameof(destinationTypeId));
Argument.AssertNotNullOrEmpty(sourcePath, nameof(sourcePath));
Argument.AssertNotNullOrWhiteSpace(destinationPath, nameof(destinationPath));
- Argument.AssertNotNull(ttlAfterCompletion, nameof(ttlAfterCompletion));
- Argument.AssertNotNull(dstBlobData, nameof(dstBlobData));
- Argument.AssertNotNull(dstLocalData, nameof(dstLocalData));
- // Version
- if (version.Length == DataMovementConstants.JobPartPlanFile.VersionStrLength)
- {
- Version = version;
- }
- else
+ Argument.AssertNotNull(jobPartStatus, nameof(jobPartStatus));
+
+ if (version.Length != DataMovementConstants.JobPartPlanFile.VersionStrLength)
{
- throw Errors.InvalidPlanFileElement(
+ throw Errors.InvalidPartHeaderElementLength(
elementName: nameof(Version),
expectedSize: DataMovementConstants.JobPartPlanFile.VersionStrLength,
actualSize: version.Length);
}
- StartTime = startTime;
- // TransferId
- if (transferId.Length == DataMovementConstants.JobPartPlanFile.TransferIdStrLength)
- {
- TransferId = transferId;
- }
- else
- {
- throw Errors.InvalidPlanFileElement(
- elementName: nameof(TransferId),
- expectedSize: DataMovementConstants.JobPartPlanFile.TransferIdStrLength,
- actualSize: transferId.Length);
- }
- PartNumber = partNumber;
- // Source resource type
- if (sourceResourceId.Length <= DataMovementConstants.JobPartPlanFile.ResourceIdMaxStrLength)
- {
- SourceResourceId = sourceResourceId;
- SourceResourceIdLength = (ushort) sourceResourceId.Length;
- }
- else
- {
- throw Errors.InvalidPlanFileElement(
- elementName: nameof(sourceResourceId),
- expectedSize: DataMovementConstants.JobPartPlanFile.ResourceIdMaxStrLength,
- actualSize: sourceResourceId.Length);
- }
- // SourcePath
- if (sourcePath.Length <= DataMovementConstants.JobPartPlanFile.PathStrMaxLength)
- {
- SourcePath = sourcePath;
- SourcePathLength = (ushort) sourcePath.Length;
- }
- else
+ if (!Guid.TryParse(transferId, out Guid _))
{
- throw Errors.InvalidPlanFileElement(
- elementName: nameof(SourcePath),
- expectedSize: DataMovementConstants.JobPartPlanFile.PathStrMaxLength,
- actualSize: sourcePath.Length);
+ throw Errors.InvalidPartHeaderElement(nameof(transferId), transferId);
}
- // SourceQuery
- if (sourceExtraQuery.Length <= DataMovementConstants.JobPartPlanFile.ExtraQueryMaxLength)
+ if (sourceTypeId.Length > DataMovementConstants.JobPartPlanFile.TypeIdMaxStrLength)
{
- SourceExtraQuery = sourceExtraQuery;
- SourceExtraQueryLength = (ushort) sourceExtraQuery.Length;
+ throw Errors.InvalidPartHeaderElementLength(
+ elementName: nameof(sourceTypeId),
+ expectedSize: DataMovementConstants.JobPartPlanFile.TypeIdMaxStrLength,
+ actualSize: sourceTypeId.Length);
}
- else
+ if (destinationTypeId.Length > DataMovementConstants.JobPartPlanFile.TypeIdMaxStrLength)
{
- throw Errors.InvalidPlanFileElement(
- elementName: nameof(SourceExtraQuery),
- expectedSize: DataMovementConstants.JobPartPlanFile.ExtraQueryMaxLength,
- actualSize: sourceExtraQuery.Length);
+ throw Errors.InvalidPartHeaderElementLength(
+ elementName: nameof(destinationTypeId),
+ expectedSize: DataMovementConstants.JobPartPlanFile.TypeIdMaxStrLength,
+ actualSize: destinationTypeId.Length);
}
- // Destination resource type
- if (destinationResourceId.Length <= DataMovementConstants.JobPartPlanFile.ResourceIdMaxStrLength)
- {
- DestinationResourceId = destinationResourceId;
- DestinationResourceIdLength = (ushort)destinationResourceId.Length;
- }
- else
- {
- throw Errors.InvalidPlanFileElement(
- elementName: nameof(destinationResourceId),
- expectedSize: DataMovementConstants.JobPartPlanFile.ResourceIdMaxStrLength,
- actualSize: destinationResourceId.Length);
- }
- // DestinationPath
- if (destinationPath.Length <= DataMovementConstants.JobPartPlanFile.PathStrMaxLength)
- {
- DestinationPath = destinationPath;
- DestinationPathLength = (ushort) destinationPath.Length;
- }
- else
- {
- throw Errors.InvalidPlanFileElement(
- elementName: nameof(DestinationPath),
- expectedSize: DataMovementConstants.JobPartPlanFile.PathStrMaxLength,
- actualSize: destinationPath.Length);
- }
- // DestinationQuery
- if (destinationExtraQuery.Length <= DataMovementConstants.JobPartPlanFile.ExtraQueryMaxLength)
- {
- DestinationExtraQuery = destinationExtraQuery;
- DestinationExtraQueryLength = (ushort) destinationExtraQuery.Length;
- }
- else
- {
- throw Errors.InvalidPlanFileElement(
- elementName: nameof(DestinationExtraQuery),
- expectedSize: DataMovementConstants.JobPartPlanFile.ExtraQueryMaxLength,
- actualSize: destinationExtraQuery.Length);
- }
- IsFinalPart = isFinalPart;
- ForceWrite = forceWrite;
- ForceIfReadOnly = forceIfReadOnly;
- AutoDecompress = autoDecompress;
+
+ Version = version;
+ TransferId = transferId;
+ PartNumber = partNumber;
+ CreateTime = createTime;
+ SourceTypeId = sourceTypeId;
+ DestinationTypeId = destinationTypeId;
+ SourcePath = sourcePath;
+ DestinationPath = destinationPath;
+ Overwrite = overwrite;
+ InitialTransferSize = initialTransferSize;
+ ChunkSize = chunkSize;
Priority = priority;
- TTLAfterCompletion = ttlAfterCompletion;
- JobPlanOperation = jobPlanOperation;
- FolderPropertyMode = folderPropertyMode;
- NumberChunks = numberChunks;
- DstBlobData = dstBlobData;
- DstLocalData = dstLocalData;
- PreserveSMBPermissions = preserveSMBPermissions;
- PreserveSMBInfo = preserveSMBInfo;
- S2SGetPropertiesInBackend = s2sGetPropertiesInBackend;
- S2SSourceChangeValidation = s2sSourceChangeValidation;
- DestLengthValidation = destLengthValidation;
- S2SInvalidMetadataHandleOption = s2sInvalidMetadataHandleOption;
- DeleteSnapshotsOption = deleteSnapshotsOption;
- PermanentDeleteOption = permanentDeleteOption;
- RehydratePriorityType = rehydratePriorityType;
- AtomicJobStatus = atomicJobStatus;
- AtomicPartStatus = atomicPartStatus;
+ JobPartStatus = jobPartStatus;
}
- ///
- /// Serializes the to the specified .
- ///
- /// The to which the serialized will be written.
public void Serialize(Stream stream)
{
- if (stream is null)
- {
- throw new ArgumentNullException(nameof(stream));
- }
-
+ Argument.AssertNotNull(stream, nameof(stream));
BinaryWriter writer = new BinaryWriter(stream);
+ int currentVariableLengthIndex = DataMovementConstants.JobPartPlanFile.VariableLengthStartIndex;
// Version
- WriteString(writer, Version, DataMovementConstants.JobPartPlanFile.VersionStrNumBytes);
-
- // StartTime
- writer.Write(StartTime.Ticks);
+ writer.WritePaddedString(Version, DataMovementConstants.JobPartPlanFile.VersionStrNumBytes);
- // TransferId
- WriteString(writer, TransferId, DataMovementConstants.JobPartPlanFile.TransferIdStrNumBytes);
+ // TransferId (write as bytes)
+ Guid transferId = Guid.Parse(TransferId);
+ writer.Write(transferId.ToByteArray());
// PartNumber
writer.Write(PartNumber);
- // SourceResourceIdLength
- writer.Write(SourceResourceIdLength);
+ // CreateTime
+ writer.Write(CreateTime.Ticks);
- // SourceResourceId
- WriteString(writer, SourceResourceId, DataMovementConstants.JobPartPlanFile.ResourceIdNumBytes);
+ // SourceTypeId
+ writer.WritePaddedString(SourceTypeId, DataMovementConstants.JobPartPlanFile.TypeIdNumBytes);
- // SourcePathLength
- writer.Write(SourcePathLength);
+ // DestinationTypeId
+ writer.WritePaddedString(DestinationTypeId, DataMovementConstants.JobPartPlanFile.TypeIdNumBytes);
- // SourcePath
- WriteString(writer, SourcePath, DataMovementConstants.JobPartPlanFile.PathStrNumBytes);
-
- // SourceExtraQueryLength
- writer.Write(SourceExtraQueryLength);
-
- // SourceExtraQuery
- WriteString(writer, SourceExtraQuery, DataMovementConstants.JobPartPlanFile.ExtraQueryNumBytes);
-
- // DestinationResourceIdLength
- writer.Write(DestinationResourceIdLength);
+ // SourcePath offset/length
+ byte[] sourcePathBytes = Encoding.UTF8.GetBytes(SourcePath);
+ writer.WriteVariableLengthFieldInfo(sourcePathBytes.Length, ref currentVariableLengthIndex);
- // DestinationResourceId
- WriteString(writer, DestinationResourceId, DataMovementConstants.JobPartPlanFile.ResourceIdNumBytes);
-
- // DestinationPathLength
- writer.Write(DestinationPathLength);
-
- // DestinationPath
- WriteString(writer, DestinationPath, DataMovementConstants.JobPartPlanFile.PathStrNumBytes);
+ // DestinationPath offset/length
+ byte[] destinationPathBytes = Encoding.UTF8.GetBytes(DestinationPath);
+ writer.WriteVariableLengthFieldInfo(destinationPathBytes.Length, ref currentVariableLengthIndex);
- // DestinationExtraQueryLength
- writer.Write(DestinationExtraQueryLength);
+ // Overwrite
+ writer.Write(Overwrite);
- // DestinationExtraQuery
- WriteString(writer, DestinationExtraQuery, DataMovementConstants.JobPartPlanFile.ExtraQueryNumBytes);
+ // InitialTransferSize
+ writer.Write(InitialTransferSize);
- // IsFinalPart
- writer.Write(Convert.ToByte(IsFinalPart));
-
- // ForceWrite
- writer.Write(Convert.ToByte(ForceWrite));
-
- // ForceIfReadOnly
- writer.Write(Convert.ToByte(ForceIfReadOnly));
-
- // AutoDecompress
- writer.Write(Convert.ToByte(AutoDecompress));
+ // ChunkSize
+ writer.Write(ChunkSize);
// Priority
writer.Write(Priority);
- // TTLAfterCompletion
- writer.Write(TTLAfterCompletion.Ticks);
-
- // FromTo
- writer.Write((byte)JobPlanOperation);
-
- // FolderPropertyOption
- writer.Write((byte)FolderPropertyMode);
-
- // NumberChunks
- writer.Write(NumberChunks);
-
- // DstBlobData.BlobType
- writer.Write((byte)DstBlobData.BlobType);
-
- // DstBlobData.NoGuessMimeType
- writer.Write(Convert.ToByte(DstBlobData.NoGuessMimeType));
-
- // DstBlobData.ContentTypeLength
- writer.Write(DstBlobData.ContentTypeLength);
-
- // DstBlobData.ContentType
- WriteString(writer, DstBlobData.ContentType, DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes);
-
- // DstBlobData.ContentEncodingLength
- writer.Write(DstBlobData.ContentEncodingLength);
-
- // DstBlobData.ContentEncoding
- WriteString(writer, DstBlobData.ContentEncoding, DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes);
-
- // DstBlobData.ContentLanguageLength
- writer.Write(DstBlobData.ContentLanguageLength);
-
- // DstBlobData.ContentLanguage
- WriteString(writer, DstBlobData.ContentLanguage, DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes);
-
- // DstBlobData.ContentDispositionLength
- writer.Write(DstBlobData.ContentDispositionLength);
-
- // DstBlobData.ContentDisposition
- WriteString(writer, DstBlobData.ContentDisposition, DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes);
-
- // DstBlobData.CacheControlLength
- writer.Write(DstBlobData.CacheControlLength);
-
- // DstBlobData.CacheControl
- WriteString(writer, DstBlobData.CacheControl, DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes);
-
- // DstBlobData.BlockBlobTier
- writer.Write((byte)DstBlobData.BlockBlobTier);
-
- // DstBlobData.PageBlobTier
- writer.Write((byte)DstBlobData.PageBlobTier);
-
- // DstBlobData.PutMd5
- writer.Write(Convert.ToByte(DstBlobData.PutMd5));
-
- // DstBlobData.MetadataLength
- writer.Write(DstBlobData.MetadataLength);
-
- // DstBlobData.Metadata
- WriteString(writer, DstBlobData.Metadata, DataMovementConstants.JobPartPlanFile.MetadataStrNumBytes);
-
- // DstBlobData.BlobTagsLength
- writer.Write(DstBlobData.BlobTagsLength);
-
- // DstBlobData.BlobTags
- WriteString(writer, DstBlobData.BlobTags, DataMovementConstants.JobPartPlanFile.BlobTagsStrNumBytes);
-
- // DstBlobData.IsSourceEncrypted
- writer.Write(DstBlobData.IsSourceEncrypted);
-
- // DstBlobData.CpkScopeInfoLength
- writer.Write(DstBlobData.CpkScopeInfoLength);
-
- // DstBlobData.CpkScopeInfo
- WriteString(writer, DstBlobData.CpkScopeInfo, DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes);
-
- // DstBlobData.BlockSize
- writer.Write(DstBlobData.BlockSize);
-
- // DstLocalData.PreserveLastModifiedTime
- writer.Write(Convert.ToByte(DstLocalData.PreserveLastModifiedTime));
+ // JobPartStatus
+ writer.Write((int)JobPartStatus.ToJobPlanStatus());
- // DstLocalData.MD5VerificationOption
- writer.Write(DstLocalData.ChecksumVerificationOption);
-
- // PreserveSMBPermissions
- writer.Write(Convert.ToByte(PreserveSMBPermissions));
-
- // PreserveSMBInfo
- writer.Write(Convert.ToByte(PreserveSMBInfo));
-
- // S2SGetPropertiesInBackend
- writer.Write(Convert.ToByte(S2SGetPropertiesInBackend));
-
- // S2SSourceChangeValidationBuffer
- writer.Write(Convert.ToByte(S2SSourceChangeValidation));
-
- // DestLengthValidation
- writer.Write(Convert.ToByte(DestLengthValidation));
-
- // S2SInvalidMetadataHandleOption
- writer.Write(S2SInvalidMetadataHandleOption);
-
- // DeleteSnapshotsOption
- writer.Write((byte)DeleteSnapshotsOption);
-
- // PermanentDeleteOption
- writer.Write((byte)PermanentDeleteOption);
-
- // RehydratePriorityType
- writer.Write((byte)RehydratePriorityType);
-
- // AtomicJobStatus.State
- writer.Write((byte)AtomicJobStatus.State);
-
- // AtomicJobStatus.HasFailedItems
- writer.Write(Convert.ToByte(AtomicJobStatus.HasFailedItems));
-
- // AtomicJobStatus.HasSkippedItems
- writer.Write(Convert.ToByte(AtomicJobStatus.HasSkippedItems));
-
- // AtomicPartStatus.State
- writer.Write((byte)AtomicPartStatus.State);
-
- // AtomicPartStatus.HasFailedItems
- writer.Write(Convert.ToByte(AtomicPartStatus.HasFailedItems));
+ // SourcePath
+ writer.Write(sourcePathBytes);
- // AtomicPartStatus.HasSkippedItems
- writer.Write(Convert.ToByte(AtomicPartStatus.HasSkippedItems));
+ // DestinationPath
+ writer.Write(destinationPathBytes);
}
public static JobPartPlanHeader Deserialize(Stream stream)
{
- if (stream is null)
- {
- throw new ArgumentNullException(nameof(stream));
- }
-
+ Argument.AssertNotNull(stream, nameof(stream));
BinaryReader reader = new BinaryReader(stream);
reader.BaseStream.Position = 0;
// Version
- byte[] versionBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.VersionStrNumBytes);
- string version = versionBuffer.ToString(DataMovementConstants.JobPartPlanFile.VersionStrLength);
+ string version = reader.ReadPaddedString(DataMovementConstants.JobPartPlanFile.VersionStrNumBytes);
// Assert the schema version before continuing
CheckSchemaVersion(version);
- // Start Time
- byte[] startTimeBuffer = reader.ReadBytes(DataMovementConstants.LongSizeInBytes);
- DateTimeOffset startTime = new DateTimeOffset(startTimeBuffer.ToLong(), new TimeSpan(0, 0, 0));
-
- // Transfer Id
- byte[] transferIdBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.TransferIdStrNumBytes);
- string transferId = transferIdBuffer.ToString(DataMovementConstants.JobPartPlanFile.TransferIdStrLength);
+ // TransferId
+ byte[] transferIdBuffer = reader.ReadBytes(DataMovementConstants.GuidSizeInBytes);
+ string transferId = new Guid(transferIdBuffer).ToString();
- // Job Part Number
+ // PartNumber
byte[] partNumberBuffer = reader.ReadBytes(DataMovementConstants.LongSizeInBytes);
long partNumber = partNumberBuffer.ToLong();
- // SourceResourceIdLength
- byte[] sourceResourceIdLengthBuffer = reader.ReadBytes(DataMovementConstants.UShortSizeInBytes);
- ushort sourceResourceIdLength = sourceResourceIdLengthBuffer.ToUShort();
+ // CreateTime
+ long createTimeTicks = reader.ReadInt64();
+ DateTimeOffset createTime = new DateTimeOffset(createTimeTicks, new TimeSpan(0, 0, 0));
- // SourceResourceId
- byte[] sourceResourceIdBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.ResourceIdNumBytes);
- string sourceResourceId = sourceResourceIdBuffer.ToString(sourceResourceIdLength);
+ // SourceTypeId
+ string sourceTypeId = reader.ReadPaddedString(DataMovementConstants.JobPartPlanFile.TypeIdNumBytes);
- // SourcePathLength
- byte[] sourcePathLengthBuffer = reader.ReadBytes(DataMovementConstants.UShortSizeInBytes);
- ushort sourcePathLength = sourcePathLengthBuffer.ToUShort();
+ // DestinationTypeId
+ string destinationTypeId = reader.ReadPaddedString(DataMovementConstants.JobPartPlanFile.TypeIdNumBytes);
- // SourcePath
- byte[] sourcePathBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.PathStrNumBytes);
- string sourcePath = sourcePathBuffer.ToString(sourcePathLength);
-
- // SourceExtraQueryLength
- byte[] sourceExtraQueryLengthBuffer = reader.ReadBytes(DataMovementConstants.UShortSizeInBytes);
- ushort sourceExtraQueryLength = sourceExtraQueryLengthBuffer.ToUShort();
-
- // SourceExtraQuery
- byte[] sourceExtraQueryBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.ExtraQueryNumBytes);
- string sourceExtraQuery = sourceExtraQueryBuffer.ToString(sourceExtraQueryLength);
-
- // DestinationResourceIdLength
- byte[] destinationResourceIdLengthBuffer = reader.ReadBytes(DataMovementConstants.UShortSizeInBytes);
- ushort destinationResourceIdLength = destinationResourceIdLengthBuffer.ToUShort();
+ // SourcePath offset/length
+ int sourcePathOffset = reader.ReadInt32();
+ int sourcePathLength = reader.ReadInt32();
- // DestinationResourceId
- byte[] destinationResourceIdBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.ResourceIdNumBytes);
- string destinationResourceId = destinationResourceIdBuffer.ToString(destinationResourceIdLength);
-
- // DestinationPathLength
- byte[] destinationPathLengthBuffer = reader.ReadBytes(DataMovementConstants.UShortSizeInBytes);
- ushort destinationPathLength = destinationPathLengthBuffer.ToUShort();
-
- // DestinationPath
- byte[] destinationPathBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.PathStrNumBytes);
- string destinationPath = destinationPathBuffer.ToString(destinationPathLength);
+ // DestinationPath offset/length
+ int destinationPathOffset = reader.ReadInt32();
+ int destinationPathLength = reader.ReadInt32();
- // DestinationExtraQueryLength
- byte[] destinationExtraQueryLengthBuffer = reader.ReadBytes(DataMovementConstants.UShortSizeInBytes);
- ushort destinationExtraQueryLength = destinationExtraQueryLengthBuffer.ToUShort();
+ // Overwrite
+ byte overwriteByte = reader.ReadByte();
+ bool overwrite = Convert.ToBoolean(overwriteByte);
- // DestinationExtraQuery
- byte[] destinationExtraQueryBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.ExtraQueryNumBytes);
- string destinationExtraQuery = destinationExtraQueryBuffer.ToString(destinationExtraQueryLength);
+ // InitialTransferSize
+ long initialTransferSize = reader.ReadInt64();
- // IsFinalPart
- byte isFinalPartByte = reader.ReadByte();
- bool isFinalPart = Convert.ToBoolean(isFinalPartByte);
-
- // ForceWrite
- byte forceWriteByte = reader.ReadByte();
- bool forceWrite = Convert.ToBoolean(forceWriteByte);
-
- // ForceIfReadOnly
- byte forceIfReadOnlyByte = reader.ReadByte();
- bool forceIfReadOnly = Convert.ToBoolean(forceIfReadOnlyByte);
-
- // AutoDecompress
- byte autoDecompressByte = reader.ReadByte();
- bool autoDecompress = Convert.ToBoolean(autoDecompressByte);
+ // ChunkSize
+ long chunkSize = reader.ReadInt64();
// Priority
byte priority = reader.ReadByte();
- // TTLAfterCompletion
- byte[] ttlAfterCompletionBuffer = reader.ReadBytes(DataMovementConstants.LongSizeInBytes);
- DateTimeOffset ttlAfterCompletion = new DateTimeOffset(ttlAfterCompletionBuffer.ToLong(), new TimeSpan(0, 0, 0));
-
- // JobPlanOperation
- byte fromToByte = reader.ReadByte();
- JobPlanOperation fromTo = (JobPlanOperation)fromToByte;
-
- // FolderPropertyOption
- byte folderPropertyOptionByte = reader.ReadByte();
- FolderPropertiesMode folderPropertyMode = (FolderPropertiesMode)folderPropertyOptionByte;
-
- // NumberChunks
- byte[] numberChunksBuffer = reader.ReadBytes(DataMovementConstants.LongSizeInBytes);
- long numberChunks = numberChunksBuffer.ToLong();
-
- // DstBlobData.BlobType
- byte blobTypeByte = reader.ReadByte();
- JobPlanBlobType blobType = (JobPlanBlobType)blobTypeByte;
-
- // DstBlobData.NoGuessMimeType
- byte noGuessMimeTypeByte = reader.ReadByte();
- bool noGuessMimeType = Convert.ToBoolean(noGuessMimeTypeByte);
-
- // DstBlobData.ContentTypeLength
- byte[] contentTypeLengthBuffer = reader.ReadBytes(DataMovementConstants.UShortSizeInBytes);
- ushort contentTypeLength = contentTypeLengthBuffer.ToUShort();
-
- // DstBlobData.ContentType
- byte[] contentTypeBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes);
- string contentType = contentTypeBuffer.ToString(contentTypeLength);
-
- // DstBlobData.ContentEncodingLength
- byte[] contentEncodingLengthBuffer = reader.ReadBytes(DataMovementConstants.UShortSizeInBytes);
- ushort contentEncodingLength = contentEncodingLengthBuffer.ToUShort();
-
- // DstBlobData.ContentEncoding
- byte[] contentEncodingBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes);
- string contentEncoding = contentEncodingBuffer.ToString(contentEncodingLength);
-
- // DstBlobData.ContentLanguageLength
- byte[] contentLanguageLengthBuffer = reader.ReadBytes(DataMovementConstants.UShortSizeInBytes);
- ushort contentLanguageLength = contentLanguageLengthBuffer.ToUShort();
-
- // DstBlobData.ContentLanguage
- byte[] contentLanguageBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes);
- string contentLanguage = contentLanguageBuffer.ToString(contentLanguageLength);
-
- // DstBlobData.ContentDispositionLength
- byte[] contentDispositionLengthBuffer = reader.ReadBytes(DataMovementConstants.UShortSizeInBytes);
- ushort contentDispositionLength = contentDispositionLengthBuffer.ToUShort();
-
- // DstBlobData.ContentDisposition
- byte[] contentDispositionBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes);
- string contentDisposition = contentDispositionBuffer.ToString(contentDispositionLength);
-
- // DstBlobData.CacheControlLength
- byte[] cacheControlLengthBuffer = reader.ReadBytes(DataMovementConstants.UShortSizeInBytes);
- ushort cacheControlLength = cacheControlLengthBuffer.ToUShort();
-
- // DstBlobData.CacheControl
- byte[] cacheControlBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes);
- string cacheControl = cacheControlBuffer.ToString(cacheControlLength);
-
- // DstBlobData.BlockBlobTier
- byte blockBlobTierByte = reader.ReadByte();
- JobPartPlanBlockBlobTier blockBlobTier = (JobPartPlanBlockBlobTier)blockBlobTierByte;
-
- // DstBlobData.PageBlobTier
- byte pageBlobTierByte = reader.ReadByte();
- JobPartPlanPageBlobTier pageBlobTier = (JobPartPlanPageBlobTier)pageBlobTierByte;
-
- // DstBlobData.PutMd5
- byte putMd5Byte = reader.ReadByte();
- bool putMd5 = Convert.ToBoolean(putMd5Byte);
-
- // DstBlobData.MetadataLength
- byte[] metadataLengthBuffer = reader.ReadBytes(DataMovementConstants.UShortSizeInBytes);
- ushort metadataLength = metadataLengthBuffer.ToUShort();
-
- // DstBlobData.Metadata
- byte[] metadataBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.MetadataStrNumBytes);
- string metadata = metadataBuffer.ToString(metadataLength);
-
- // DstBlobData.BlobTagsLength
- byte[] blobTagsLengthBuffer = reader.ReadBytes(DataMovementConstants.LongSizeInBytes);
- long blobTagsLength = blobTagsLengthBuffer.ToLong();
-
- // DstBlobData.BlobTags
- byte[] blobTagsBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.BlobTagsStrNumBytes);
- string blobTags = blobTagsBuffer.ToString(blobTagsLength);
-
- // DstBlobData.IsSourceEncrypted
- bool isSourceEncrypted = Convert.ToBoolean(reader.ReadByte());
-
- // DstBlobData.CpkScopeInfoLength
- byte[] cpkScopeInfoLengthBuffer = reader.ReadBytes(DataMovementConstants.UShortSizeInBytes);
- ushort cpkScopeInfoLength = cpkScopeInfoLengthBuffer.ToUShort();
-
- // DstBlobData.CpkScopeInfo
- byte[] cpkScopeInfoBuffer = reader.ReadBytes(DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes);
- string cpkScopeInfo = cpkScopeInfoBuffer.ToString(cpkScopeInfoLength);
-
- // DstBlobData.BlockSize
- byte[] blockSizeLengthBuffer = reader.ReadBytes(DataMovementConstants.LongSizeInBytes);
- long blockSize = blockSizeLengthBuffer.ToLong();
+ // JobPartStatus
+ JobPlanStatus jobPlanStatus = (JobPlanStatus)reader.ReadInt32();
+ DataTransferStatus jobPartStatus = jobPlanStatus.ToDataTransferStatus();
- // DstLocalData.PreserveLastModifiedTime
- bool preserveLastModifiedTime = Convert.ToBoolean(reader.ReadByte());
-
- // DstBlobData.MD5VerificationOption
- byte checksumVerificationOption = reader.ReadByte();
-
- // preserveSMBPermissions
- bool preserveSMBPermissions = Convert.ToBoolean(reader.ReadByte());
-
- // PreserveSMBInfo
- bool preserveSMBInfo = Convert.ToBoolean(reader.ReadByte());
-
- // S2SGetPropertiesInBackend
- bool s2sGetPropertiesInBackend = Convert.ToBoolean(reader.ReadByte());
-
- // S2SSourceChangeValidation
- bool s2sSourceChangeValidation = Convert.ToBoolean(reader.ReadByte());
-
- // DestLengthValidation
- bool destLengthValidation = Convert.ToBoolean(reader.ReadByte());
-
- // S2SInvalidMetadataHandleOption
- byte s2sInvalidMetadataHandleOption = reader.ReadByte();
-
- // DeleteSnapshotsOption
- byte deleteSnapshotsOptionByte = reader.ReadByte();
- JobPartDeleteSnapshotsOption deleteSnapshotsOption = (JobPartDeleteSnapshotsOption)deleteSnapshotsOptionByte;
-
- // PermanentDeleteOption
- byte permanentDeleteOptionByte = reader.ReadByte();
- JobPartPermanentDeleteOption permanentDeleteOption = (JobPartPermanentDeleteOption)permanentDeleteOptionByte;
-
- // RehydratePriorityType
- byte rehydratePriorityTypeByte = reader.ReadByte();
- JobPartPlanRehydratePriorityType rehydratePriorityType = (JobPartPlanRehydratePriorityType)rehydratePriorityTypeByte;
-
- // AtomicJobStatus.State
- byte atomicJobStatusByte = reader.ReadByte();
- DataTransferState jobStatusState = (DataTransferState)atomicJobStatusByte;
-
- // AtomicJobStatus.HasFailedItems
- bool jobStatusHasFailed = Convert.ToBoolean(reader.ReadByte());
-
- // AtomicJobStatus.HasSkippedItems
- bool jobStatusHasSkipped = Convert.ToBoolean(reader.ReadByte());
-
- // AtomicPartStatus.State
- byte atomicPartStatusByte = reader.ReadByte();
- DataTransferState partStatusState = (DataTransferState)atomicPartStatusByte;
-
- // AtomicPartStatus.HasFailedItems
- bool partStatusHasFailed = Convert.ToBoolean(reader.ReadByte());
-
- // AtomicPartStatus.HasSkippedItems
- bool partStatusHasSkipped = Convert.ToBoolean(reader.ReadByte());
-
- DataTransferStatus atomicJobStatus = new DataTransferStatus(
- jobStatusState,
- jobStatusHasFailed,
- jobStatusHasSkipped);
-
- DataTransferStatus atomicPartStatus = new DataTransferStatus(
- partStatusState,
- partStatusHasFailed,
- partStatusHasSkipped);
-
- JobPartPlanDestinationBlob dstBlobData = new JobPartPlanDestinationBlob(
- blobType: blobType,
- noGuessMimeType: noGuessMimeType,
- contentType: contentType,
- contentEncoding: contentEncoding,
- contentLanguage: contentLanguage,
- contentDisposition: contentDisposition,
- cacheControl: cacheControl,
- blockBlobTier: blockBlobTier,
- pageBlobTier: pageBlobTier,
- putMd5: putMd5,
- metadata: metadata,
- blobTags: blobTags,
- isSourceEncrypted: isSourceEncrypted,
- cpkScopeInfo: cpkScopeInfo,
- blockSize: blockSize);
+ // SourcePath
+ string sourcePath = null;
+ if (sourcePathOffset > 0)
+ {
+ reader.BaseStream.Position = sourcePathOffset;
+ byte[] parentSourcePathBytes = reader.ReadBytes(sourcePathLength);
+ sourcePath = parentSourcePathBytes.ToString(sourcePathLength);
+ }
- JobPartPlanDestinationLocal dstLocalData = new JobPartPlanDestinationLocal(
- preserveLastModifiedTime: preserveLastModifiedTime,
- checksumVerificationOption: checksumVerificationOption);
+ // DestinationPath
+ string destinationPath = null;
+ if (destinationPathOffset > 0)
+ {
+ reader.BaseStream.Position = destinationPathOffset;
+ byte[] parentSourcePathBytes = reader.ReadBytes(destinationPathLength);
+ destinationPath = parentSourcePathBytes.ToString(destinationPathLength);
+ }
return new JobPartPlanHeader(
- version: version,
- startTime: startTime,
- transferId: transferId,
- partNumber: partNumber,
- sourceResourceId: sourceResourceId,
- sourcePath: sourcePath,
- sourceExtraQuery: sourceExtraQuery,
- destinationResourceId: destinationResourceId,
- destinationPath: destinationPath,
- destinationExtraQuery: destinationExtraQuery,
- isFinalPart: isFinalPart,
- forceWrite: forceWrite,
- forceIfReadOnly: forceIfReadOnly,
- autoDecompress: autoDecompress,
- priority: priority,
- ttlAfterCompletion: ttlAfterCompletion,
- jobPlanOperation: fromTo,
- folderPropertyMode: folderPropertyMode,
- numberChunks: numberChunks,
- dstBlobData: dstBlobData,
- dstLocalData: dstLocalData,
- preserveSMBPermissions: preserveSMBPermissions,
- preserveSMBInfo: preserveSMBInfo,
- s2sGetPropertiesInBackend: s2sGetPropertiesInBackend,
- s2sSourceChangeValidation: s2sSourceChangeValidation,
- destLengthValidation: destLengthValidation,
- s2sInvalidMetadataHandleOption: s2sInvalidMetadataHandleOption,
- deleteSnapshotsOption: deleteSnapshotsOption,
- permanentDeleteOption: permanentDeleteOption,
- rehydratePriorityType: rehydratePriorityType,
- atomicJobStatus: atomicJobStatus,
- atomicPartStatus: atomicPartStatus);
+ version,
+ transferId,
+ partNumber,
+ createTime,
+ sourceTypeId,
+ destinationTypeId,
+ sourcePath,
+ destinationPath,
+ overwrite,
+ initialTransferSize,
+ chunkSize,
+ priority,
+ jobPartStatus);
}
- private static void WriteString(BinaryWriter writer, string value, int setSizeInBytes)
+ ///
+ /// Internal equals for testing.
+ ///
+ internal bool Equals(JobPartPlanHeader other)
{
- writer.Write(value.ToCharArray());
-
- int padding = setSizeInBytes - value.Length;
- if (padding > 0)
+ if (other is null)
{
- char[] paddingArray = new char[padding];
- writer.Write(paddingArray);
+ return false;
}
+
+ return
+ (Version == other.Version) &&
+ (TransferId == other.TransferId) &&
+ (PartNumber == other.PartNumber) &&
+ (CreateTime == other.CreateTime) &&
+ (SourceTypeId == other.SourceTypeId) &&
+ (DestinationTypeId == other.DestinationTypeId) &&
+ (SourcePath == other.SourcePath) &&
+ (DestinationPath == other.DestinationPath) &&
+ (Overwrite == other.Overwrite) &&
+ (InitialTransferSize == other.InitialTransferSize) &&
+ (ChunkSize == other.ChunkSize) &&
+ (Priority == other.Priority) &&
+ (JobPartStatus == other.JobPartStatus);
}
private static void CheckSchemaVersion(string version)
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanPageBlobTier.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanPageBlobTier.cs
deleted file mode 100644
index bf5c7b3ae7528..0000000000000
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanPageBlobTier.cs
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-namespace Azure.Storage.DataMovement.JobPlan
-{
- internal enum JobPartPlanPageBlobTier
- {
- /// None.
- None = 0,
- /// P4.
- P4 = 4,
- /// P6.
- P6 = 6,
- /// P10.
- P10 = 10,
- /// P15.
- P15 = 15,
- /// P20.
- P20 = 20,
- /// P30.
- P30 = 30,
- /// P40.
- P40 = 40,
- /// P50.
- P50 = 50,
- /// P60.
- P60 = 60,
- /// P70.
- P70 = 70,
- /// P80.
- P80 = 80,
- }
-}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanRehydratePriorityType.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanRehydratePriorityType.cs
deleted file mode 100644
index 755b161b4f0cf..0000000000000
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPartPlanRehydratePriorityType.cs
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-namespace Azure.Storage.DataMovement.JobPlan
-{
- internal enum JobPartPlanRehydratePriorityType
- {
- None = 0,
- Standard = 1,
- High = 2
- }
-}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanBlobType.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanBlobType.cs
deleted file mode 100644
index 65707dd02931f..0000000000000
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanBlobType.cs
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-namespace Azure.Storage.DataMovement.JobPlan
-{
- internal enum JobPlanBlobType
- {
- ///
- /// Detect blob type
- ///
- Detect = 0,
- ///
- /// Block Blob
- ///
- BlockBlob = 1,
- ///
- /// Page Blob
- ///
- PageBlob = 2,
- ///
- /// Append Blob
- ///
- AppendBlob = 3,
- }
-}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanFile.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanFile.cs
index 7427946b1df00..fe3e406e919bb 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanFile.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanFile.cs
@@ -50,7 +50,7 @@ public static async Task CreateJobPlanFileAsync(
Argument.AssertNotNullOrEmpty(id, nameof(id));
Argument.AssertNotNull(headerStream, nameof(headerStream));
- string fileName = $"{id}.{DataMovementConstants.JobPlanFile.FileExtension}";
+ string fileName = $"{id}{DataMovementConstants.JobPlanFile.FileExtension}";
string filePath = Path.Combine(checkpointerPath, fileName);
JobPlanFile jobPlanFile = new(id, filePath);
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanHeader.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanHeader.cs
index 5d1615ccdc809..95704de261b57 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanHeader.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanHeader.cs
@@ -99,6 +99,7 @@ public JobPlanHeader(
Argument.AssertNotNullOrEmpty(transferId, nameof(transferId));
Argument.AssertNotNullOrEmpty(sourceProviderId, nameof(sourceProviderId));
Argument.AssertNotNullOrEmpty(destinationProviderId, nameof(destinationProviderId));
+ Argument.AssertNotNull(jobStatus, nameof(jobStatus));
Argument.AssertNotNull(createTime, nameof(createTime));
Argument.AssertNotNullOrEmpty(parentSourcePath, nameof(parentSourcePath));
Argument.AssertNotNullOrEmpty(parentDestinationPath, nameof(parentDestinationPath));
@@ -168,7 +169,7 @@ public void Serialize(Stream stream)
BinaryWriter writer = new BinaryWriter(stream);
// Version
- WritePaddedString(writer, Version, DataMovementConstants.JobPlanFile.VersionStrNumBytes);
+ writer.WritePaddedString(Version, DataMovementConstants.JobPlanFile.VersionStrNumBytes);
// TransferId (write as bytes)
Guid transferId = Guid.Parse(TransferId);
@@ -181,10 +182,10 @@ public void Serialize(Stream stream)
writer.Write((byte)OperationType);
// SourceProviderId
- WritePaddedString(writer, SourceProviderId, DataMovementConstants.JobPlanFile.ProviderIdNumBytes);
+ writer.WritePaddedString(SourceProviderId, DataMovementConstants.JobPlanFile.ProviderIdNumBytes);
// DestinationProviderId
- WritePaddedString(writer, DestinationProviderId, DataMovementConstants.JobPlanFile.ProviderIdNumBytes);
+ writer.WritePaddedString(DestinationProviderId, DataMovementConstants.JobPlanFile.ProviderIdNumBytes);
// IsContainer
writer.Write(Convert.ToByte(IsContainer));
@@ -246,10 +247,10 @@ public static JobPlanHeader Deserialize(Stream stream)
JobPlanOperation operationType = (JobPlanOperation)operationTypeByte;
// SourceProviderId
- string sourceProviderId = ReadPaddedString(reader, DataMovementConstants.JobPlanFile.ProviderIdNumBytes);
+ string sourceProviderId = reader.ReadPaddedString(DataMovementConstants.JobPlanFile.ProviderIdNumBytes);
// DestinationProviderId
- string destProviderId = ReadPaddedString(reader, DataMovementConstants.JobPlanFile.ProviderIdNumBytes);
+ string destProviderId = reader.ReadPaddedString(DataMovementConstants.JobPlanFile.ProviderIdNumBytes);
// IsContainer
byte isContainerByte = reader.ReadByte();
@@ -328,25 +329,6 @@ public static JobPlanHeader Deserialize(Stream stream)
destinationCheckpointData);
}
- private static void WritePaddedString(BinaryWriter writer, string value, int setSizeInBytes)
- {
- byte[] valueBytes = Encoding.UTF8.GetBytes(value);
- writer.Write(valueBytes);
-
- int padding = setSizeInBytes - valueBytes.Length;
- if (padding > 0)
- {
- char[] paddingArray = new char[padding];
- writer.Write(paddingArray);
- }
- }
-
- private static string ReadPaddedString(BinaryReader reader, int numBytes)
- {
- byte[] stringBytes = reader.ReadBytes(numBytes);
- return stringBytes.ToString(numBytes).TrimEnd('\0');
- }
-
private static void CheckSchemaVersion(string version)
{
if (version != DataMovementConstants.JobPlanFile.SchemaVersion)
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPlanExtensions.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPlanExtensions.cs
index d8368553b2ab2..95953487fefc8 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPlanExtensions.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/JobPlanExtensions.cs
@@ -3,7 +3,6 @@
using System;
using System.IO;
-using System.IO.MemoryMappedFiles;
using System.Threading;
using System.Threading.Tasks;
using Azure.Storage.DataMovement.JobPlan;
@@ -12,52 +11,6 @@ namespace Azure.Storage.DataMovement
{
internal static partial class JobPlanExtensions
{
- internal static JobPartPlanHeader GetJobPartPlanHeader(this JobPartPlanFileName fileName)
- {
- JobPartPlanHeader result;
- int bufferSize = DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes;
-
- using MemoryMappedFile memoryMappedFile = MemoryMappedFile.CreateFromFile(fileName.ToString());
- using (MemoryMappedViewStream stream = memoryMappedFile.CreateViewStream(0, bufferSize, MemoryMappedFileAccess.Read))
- {
- if (!stream.CanRead)
- {
- throw Errors.CannotReadMmfStream(fileName.ToString());
- }
- result = JobPartPlanHeader.Deserialize(stream);
- }
- return result;
- }
-
- internal static async Task GetHeaderUShortValue(
- this TransferCheckpointer checkpointer,
- string transferId,
- int startIndex,
- int streamReadLength,
- int valueLength,
- CancellationToken cancellationToken)
- {
- string value;
- using (Stream stream = await checkpointer.ReadJobPartPlanFileAsync(
- transferId: transferId,
- partNumber: 0,
- offset: startIndex,
- length: streamReadLength,
- cancellationToken: cancellationToken).ConfigureAwait(false))
- {
- BinaryReader reader = new BinaryReader(stream);
-
- // Read Path Length
- byte[] pathLengthBuffer = reader.ReadBytes(DataMovementConstants.UShortSizeInBytes);
- ushort pathLength = pathLengthBuffer.ToUShort();
-
- // Read Path
- byte[] pathBuffer = reader.ReadBytes(valueLength);
- value = pathBuffer.ToString(pathLength);
- }
- return value;
- }
-
internal static async Task GetHeaderLongValue(
this TransferCheckpointer checkpointer,
string transferId,
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/LocalTransferCheckpointer.cs b/sdk/storage/Azure.Storage.DataMovement/src/LocalTransferCheckpointer.cs
index b36b131725f67..0b87991b39d8f 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/LocalTransferCheckpointer.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/LocalTransferCheckpointer.cs
@@ -61,6 +61,10 @@ public override async Task AddNewJobAsync(
StorageResource destination,
CancellationToken cancellationToken = default)
{
+ Argument.AssertNotNullOrEmpty(transferId, nameof(transferId));
+ Argument.AssertNotNull(source, nameof(source));
+ Argument.AssertNotNull(destination, nameof(destination));
+
if (_transferStates.ContainsKey(transferId))
{
throw Errors.CollisionTransferIdCheckpointer(transferId);
@@ -97,13 +101,11 @@ public override async Task AddNewJobAsync(
public override async Task AddNewJobPartAsync(
string transferId,
int partNumber,
- int chunksTotal,
Stream headerStream,
CancellationToken cancellationToken = default)
{
Argument.AssertNotNullOrEmpty(transferId, nameof(transferId));
Argument.AssertNotNull(partNumber, nameof(partNumber));
- Argument.AssertNotNull(chunksTotal, nameof(chunksTotal));
Argument.AssertNotNull(headerStream, nameof(headerStream));
headerStream.Position = 0;
@@ -328,8 +330,8 @@ public override async Task SetJobPartTransferStatusAsync(
DataTransferStatus status,
CancellationToken cancellationToken = default)
{
- long length = DataMovementConstants.OneByte * 3;
- int offset = DataMovementConstants.JobPartPlanFile.AtomicPartStatusStateIndex;
+ long length = DataMovementConstants.IntSizeInBytes;
+ int offset = DataMovementConstants.JobPartPlanFile.JobPartStatusIndex;
CancellationHelper.ThrowIfCancellationRequested(cancellationToken);
@@ -340,27 +342,13 @@ public override async Task SetJobPartTransferStatusAsync(
// Lock MMF
await file.WriteLock.WaitAsync(cancellationToken).ConfigureAwait(false);
- using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(
- path: file.FilePath,
- mode: FileMode.Open,
- mapName: null,
- capacity: DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
+ using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(file.FilePath, FileMode.Open))
+ using (MemoryMappedViewAccessor accessor = mmf.CreateViewAccessor(offset, length))
{
- using (MemoryMappedViewAccessor accessor = mmf.CreateViewAccessor(offset, length))
- {
- accessor.Write(
- position: 0,
- value: (byte)status.State);
- accessor.Write(
- position: 1,
- value: status.HasFailedItems);
- accessor.Write(
- position: 2,
- value: status.HasSkippedItems);
- // to flush to the underlying file that supports the mmf
- accessor.Flush();
- }
+ accessor.Write(0, (int)status.ToJobPlanStatus());
+ accessor.Flush();
}
+
// Release MMF
file.WriteLock.Release();
}
@@ -381,11 +369,12 @@ public override async Task SetJobPartTransferStatusAsync(
///
private void InitializeExistingCheckpointer()
{
+ // Enumerate the filesystem
+ IEnumerable checkpointFiles = Directory.EnumerateFiles(_pathToCheckpointer);
+
// First, retrieve all valid job plan files
- foreach (string path in Directory.EnumerateFiles(
- _pathToCheckpointer,
- $"*.{DataMovementConstants.JobPlanFile.FileExtension}",
- SearchOption.TopDirectoryOnly))
+ foreach (string path in checkpointFiles
+ .Where(p => Path.GetExtension(p) == DataMovementConstants.JobPlanFile.FileExtension))
{
// TODO: Should we check for valid schema version inside file now?
JobPlanFile jobPlanFile = JobPlanFile.LoadExistingJobPlanFile(path);
@@ -400,10 +389,8 @@ private void InitializeExistingCheckpointer()
}
// Retrieve all valid job part plan files stored in the checkpointer path.
- foreach (string path in Directory.EnumerateFiles(_pathToCheckpointer, "*", SearchOption.TopDirectoryOnly)
- .Where(f => Path.HasExtension(string.Concat(
- DataMovementConstants.JobPartPlanFile.FileExtension,
- DataMovementConstants.JobPartPlanFile.SchemaVersion))))
+ foreach (string path in checkpointFiles
+ .Where(p => Path.GetExtension(p) == DataMovementConstants.JobPartPlanFile.FileExtension))
{
// Ensure each file has the correct format
if (JobPartPlanFileName.TryParseJobPartPlanFileName(path, out JobPartPlanFileName partPlanFileName))
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/ServiceToServiceJobPart.cs b/sdk/storage/Azure.Storage.DataMovement/src/ServiceToServiceJobPart.cs
index 68411abee85f0..454fdc7702608 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/ServiceToServiceJobPart.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/ServiceToServiceJobPart.cs
@@ -89,7 +89,7 @@ public static async Task CreateJobPartAsync(
{
// Create Job Part file as we're initializing the job part
ServiceToServiceJobPart part = new ServiceToServiceJobPart(job, partNumber);
- await part.AddJobPartToCheckpointerAsync(1).ConfigureAwait(false); // For now we only store 1 chunk
+ await part.AddJobPartToCheckpointerAsync().ConfigureAwait(false);
return part;
}
@@ -112,7 +112,7 @@ public static async Task CreateJobPartAsync(
length: length);
if (!partPlanFileExists)
{
- await part.AddJobPartToCheckpointerAsync(1).ConfigureAwait(false); // For now we only store 1 chunk
+ await part.AddJobPartToCheckpointerAsync().ConfigureAwait(false);
}
return part;
}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/Shared/CheckpointerExtensions.cs b/sdk/storage/Azure.Storage.DataMovement/src/Shared/CheckpointerExtensions.cs
index 3424079970b1c..682a0715dfb1a 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/Shared/CheckpointerExtensions.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/Shared/CheckpointerExtensions.cs
@@ -78,6 +78,25 @@ internal static void WriteVariableLengthFieldInfo(
writer.Write(length);
}
+ internal static void WritePaddedString(this BinaryWriter writer, string value, int setSizeInBytes)
+ {
+ byte[] valueBytes = Encoding.UTF8.GetBytes(value);
+ writer.Write(valueBytes);
+
+ int padding = setSizeInBytes - valueBytes.Length;
+ if (padding > 0)
+ {
+ char[] paddingArray = new char[padding];
+ writer.Write(paddingArray);
+ }
+ }
+
+ internal static string ReadPaddedString(this BinaryReader reader, int numBytes)
+ {
+ byte[] stringBytes = reader.ReadBytes(numBytes);
+ return stringBytes.ToString(numBytes).TrimEnd('\0');
+ }
+
internal static string ToSanitizedString(this Uri uri)
{
UriBuilder builder = new(uri);
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/Shared/DataMovementConstants.cs b/sdk/storage/Azure.Storage.DataMovement/src/Shared/DataMovementConstants.cs
index 951c58d7851de..f1de3678cc375 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/Shared/DataMovementConstants.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/Shared/DataMovementConstants.cs
@@ -56,7 +56,6 @@ internal static class Log
internal const int OneByte = 1;
internal const int LongSizeInBytes = 8;
- internal const int UShortSizeInBytes = 2;
internal const int IntSizeInBytes = 4;
internal const int GuidSizeInBytes = 16;
@@ -68,7 +67,7 @@ internal static class JobPlanFile
internal const string SchemaVersion_b1 = "b1";
internal const string SchemaVersion = SchemaVersion_b1;
- internal const string FileExtension = "ndm";
+ internal const string FileExtension = ".ndm";
internal const int VersionStrLength = 2;
internal const int VersionStrNumBytes = VersionStrLength * 2;
@@ -105,171 +104,33 @@ internal static class JobPartPlanFile
internal const string SchemaVersion_b3 = "b3";
internal const string SchemaVersion = SchemaVersion_b3; // TODO: remove b for beta
- // Job Plan file extension. e.g. the file extension will look like {transferid}--{jobpartNumber}.steV{schemaVersion}
- internal const string FileExtension = ".steV";
- internal const string JobPlanFileNameDelimiter = "--";
+ // Job Plan file extension. e.g. the file extension will look like {transferid}.{jobpartNumber}.ndmpart
+ internal const string FileExtension = ".ndmpart";
internal const int JobPartLength = 5;
internal const int IdSize = 36; // Size of a guid with hyphens
- internal const int CustomHeaderMaxBytes = 256;
// UTF-8 encoding, so 2 bytes per char
internal const int VersionStrLength = 2;
internal const int VersionStrNumBytes = VersionStrLength * 2;
- internal const int TransferIdStrLength = 36;
- internal const int TransferIdStrNumBytes = TransferIdStrLength * 2;
- internal const int ResourceIdMaxStrLength = 20;
- internal const int ResourceIdNumBytes = ResourceIdMaxStrLength * 2;
- internal const int PathStrMaxLength = 4096;
- internal const int PathStrNumBytes = PathStrMaxLength * 2;
- internal const int ExtraQueryMaxLength = 1000;
- internal const int ExtraQueryNumBytes = ExtraQueryMaxLength * 2;
- internal const int HeaderValueMaxLength = 1000;
- internal const int HeaderValueNumBytes = HeaderValueMaxLength * 2;
- internal const int MetadataStrMaxLength = 4096;
- internal const int MetadataStrNumBytes = MetadataStrMaxLength * 2;
- internal const int BlobTagsStrMaxLength = 4096;
- internal const int BlobTagsStrNumBytes = BlobTagsStrMaxLength * 2;
+ internal const int TypeIdMaxStrLength = 10;
+ internal const int TypeIdNumBytes = TypeIdMaxStrLength * 2;
- /// Index: 0
- internal const int VersionIndex = 0; // Index: 0
- /// Index: 4
- internal const int StartTimeIndex = VersionIndex + VersionStrNumBytes;
- /// Index: 12
- internal const int TransferIdIndex = StartTimeIndex + LongSizeInBytes;
- /// Index: 84
- internal const int PartNumberIndex = TransferIdIndex + TransferIdStrNumBytes;
- /// Index: 92
- internal const int SourceResourceIdLengthIndex = PartNumberIndex + LongSizeInBytes;
- /// Index: 94
- internal const int SourceResourceIdIndex = SourceResourceIdLengthIndex + UShortSizeInBytes;
-
- /// Index: 134
- internal const int SourcePathLengthIndex = SourceResourceIdIndex + ResourceIdNumBytes;
- /// Index: 136
- internal const int SourcePathIndex = SourcePathLengthIndex + UShortSizeInBytes;
- /// Index: 8328
- internal const int SourceExtraQueryLengthIndex = SourcePathIndex + PathStrNumBytes;
- /// Index: 8330
- internal const int SourceExtraQueryIndex = SourceExtraQueryLengthIndex + UShortSizeInBytes;
- /// Index: 10330
- internal const int DestinationResourceIdLengthIndex = SourceExtraQueryIndex + ExtraQueryNumBytes;
- /// Index: 10332
- internal const int DestinationResourceIdIndex = DestinationResourceIdLengthIndex + UShortSizeInBytes;
- /// Index: 10372
- internal const int DestinationPathLengthIndex = DestinationResourceIdIndex + ResourceIdNumBytes;
- /// Index: 10374
- internal const int DestinationPathIndex = DestinationPathLengthIndex + UShortSizeInBytes;
- /// Index: 18566
- internal const int DestinationExtraQueryLengthIndex = DestinationPathIndex + PathStrNumBytes;
- /// Index: 18568
- internal const int DestinationExtraQueryIndex = DestinationExtraQueryLengthIndex + UShortSizeInBytes;
- /// Index: 20568
- internal const int IsFinalPartIndex = DestinationExtraQueryIndex + ExtraQueryNumBytes;
- /// Index: 20569
- internal const int ForceWriteIndex = IsFinalPartIndex + OneByte;
- /// Index: 20570
- internal const int ForceIfReadOnlyIndex = ForceWriteIndex + OneByte;
- /// Index: 20571
- internal const int AutoDecompressIndex = ForceIfReadOnlyIndex + OneByte;
- /// Index: 20572
- internal const int PriorityIndex = AutoDecompressIndex + OneByte;
- /// Index: 20573
- internal const int TTLAfterCompletionIndex = PriorityIndex + OneByte;
- /// Index: 20581
- internal const int FromToIndex = TTLAfterCompletionIndex + LongSizeInBytes;
- /// Index: 20582
- internal const int FolderPropertyModeIndex = FromToIndex + OneByte;
- /// Index: 20583
- internal const int NumberChunksIndex = FolderPropertyModeIndex + OneByte;
-
- // JobPartPlanDestinationBlob Indexes
- /// Index: 20591
- internal const int DstBlobTypeIndex = NumberChunksIndex + LongSizeInBytes;
- /// Index: 20592
- internal const int DstBlobNoGuessMimeTypeIndex = DstBlobTypeIndex + OneByte;
- /// Index: 20593
- internal const int DstBlobContentTypeLengthIndex = DstBlobNoGuessMimeTypeIndex + OneByte;
- /// Index: 20595
- internal const int DstBlobContentTypeIndex = DstBlobContentTypeLengthIndex + UShortSizeInBytes;
- /// Index: 22595
- internal const int DstBlobContentEncodingLengthIndex = DstBlobContentTypeIndex + HeaderValueNumBytes;
- /// Index: 22597
- internal const int DstBlobContentEncodingIndex = DstBlobContentEncodingLengthIndex + UShortSizeInBytes;
- /// Index: 24597
- internal const int DstBlobContentLanguageLengthIndex = DstBlobContentEncodingIndex + HeaderValueNumBytes;
- /// Index: 24599
- internal const int DstBlobContentLanguageIndex = DstBlobContentLanguageLengthIndex + UShortSizeInBytes;
- /// Index: 26599
- internal const int DstBlobContentDispositionLengthIndex = DstBlobContentLanguageIndex + HeaderValueNumBytes;
- /// Index: 26601
- internal const int DstBlobContentDispositionIndex = DstBlobContentDispositionLengthIndex + UShortSizeInBytes;
- /// Index: 28601
- internal const int DstBlobCacheControlLengthIndex = DstBlobContentDispositionIndex + HeaderValueNumBytes;
- /// Index: 28603
- internal const int DstBlobCacheControlIndex = DstBlobCacheControlLengthIndex + UShortSizeInBytes;
- /// Index: 30603
- internal const int DstBlobBlockBlobTierIndex = DstBlobCacheControlIndex + HeaderValueNumBytes;
- /// Index: 30604
- internal const int DstBlobPageBlobTierIndex = DstBlobBlockBlobTierIndex + OneByte;
- /// Index: 30605
- internal const int DstBlobPutMd5Index = DstBlobPageBlobTierIndex + OneByte;
- /// Index: 30606
- internal const int DstBlobMetadataLengthIndex = DstBlobPutMd5Index + OneByte;
- /// Index: 30608
- internal const int DstBlobMetadataIndex = DstBlobMetadataLengthIndex + UShortSizeInBytes;
- /// Index: 38800
- internal const int DstBlobTagsLengthIndex = DstBlobMetadataIndex + MetadataStrNumBytes;
- /// Index: 38808
- internal const int DstBlobTagsIndex = DstBlobTagsLengthIndex + LongSizeInBytes;
- /// Index: 47000
- internal const int DstBlobIsSourceEncrypted = DstBlobTagsIndex + BlobTagsStrNumBytes;
- /// Index: 47001
- internal const int DstBlobCpkScopeInfoLengthIndex = DstBlobIsSourceEncrypted + OneByte;
- /// Index: 47003
- internal const int DstBlobCpkScopeInfoIndex = DstBlobCpkScopeInfoLengthIndex + UShortSizeInBytes;
- /// Index: 49003
- internal const int DstBlobBlockSizeIndex = DstBlobCpkScopeInfoIndex + HeaderValueNumBytes;
-
- // JobPartPlanDestinationLocal Indexes
- /// Index: 49011
- internal const int DstLocalPreserveLastModifiedTimeIndex = DstBlobBlockSizeIndex + LongSizeInBytes;
- /// Index: 49012
- internal const int DstLocalMD5VerificationOptionIndex = DstLocalPreserveLastModifiedTimeIndex + OneByte;
-
- /// Index: 49013
- internal const int PreserveSMBPermissionsIndex = DstLocalMD5VerificationOptionIndex + OneByte;
- /// Index: 49014
- internal const int PreserveSMBInfoIndex = PreserveSMBPermissionsIndex + OneByte;
- /// Index: 49015
- internal const int S2SGetPropertiesInBackendIndex = PreserveSMBInfoIndex + OneByte;
- /// Index: 49016
- internal const int S2SSourceChangeValidationIndex = S2SGetPropertiesInBackendIndex + OneByte;
- /// Index: 49017
- internal const int DestLengthValidationIndex = S2SSourceChangeValidationIndex + OneByte;
- /// Index: 49018
- internal const int S2SInvalidMetadataHandleOptionIndex = DestLengthValidationIndex + OneByte;
- /// Index: 49019
- internal const int DeleteSnapshotsOptionIndex = S2SInvalidMetadataHandleOptionIndex + OneByte;
- /// Index: 49020
- internal const int PermanentDeleteOptionIndex = DeleteSnapshotsOptionIndex + OneByte;
- /// Index: 49021
- internal const int RehydratePriorityTypeIndex = PermanentDeleteOptionIndex + OneByte;
- /// Index: 49022
- internal const int AtomicJobStatusStateIndex = RehydratePriorityTypeIndex + OneByte;
- /// Index: 49023
- internal const int AtomicJobStatusHasFailedIndex = AtomicJobStatusStateIndex + OneByte;
- /// Index: 49024
- internal const int AtomicJobStatusHasSkippedIndex = AtomicJobStatusHasFailedIndex + OneByte;
- /// Index: 49025
- internal const int AtomicPartStatusStateIndex = AtomicJobStatusHasSkippedIndex + OneByte;
- /// Index: 49026
- internal const int AtomicPartStatusHasFailedIndex = AtomicPartStatusStateIndex + OneByte;
- /// Index: 49027
- internal const int AtomicPartStatusHasSkippedIndex = AtomicPartStatusHasFailedIndex + OneByte;
- ///
- /// Size of the JobPart Header: 49029
- ///
- internal const int JobPartHeaderSizeInBytes = AtomicPartStatusHasSkippedIndex + OneByte;
+ internal const int VersionIndex = 0;
+ internal const int TransferIdIndex = VersionIndex + VersionStrNumBytes;
+ internal const int PartNumberIndex = TransferIdIndex + GuidSizeInBytes;
+ internal const int CreateTimeIndex = PartNumberIndex + LongSizeInBytes;
+ internal const int SourceTypeIdIndex = CreateTimeIndex + LongSizeInBytes;
+ internal const int DestinationTypeIdIndex = SourceTypeIdIndex + TypeIdNumBytes;
+ internal const int SourcePathOffsetIndex = DestinationTypeIdIndex + TypeIdNumBytes;
+ internal const int SourcePathLengthIndex = SourcePathOffsetIndex + IntSizeInBytes;
+ internal const int DestinationPathOffsetIndex = SourcePathLengthIndex + IntSizeInBytes;
+ internal const int DestinationPathLengthIndex = DestinationPathOffsetIndex + IntSizeInBytes;
+ internal const int OverwriteIndex = DestinationPathLengthIndex + IntSizeInBytes;
+ internal const int InitialTransferSizeIndex = OverwriteIndex + OneByte;
+ internal const int ChunkSizeIndex = InitialTransferSizeIndex + LongSizeInBytes;
+ internal const int PriorityIndex = ChunkSizeIndex + LongSizeInBytes;
+ internal const int JobPartStatusIndex = PriorityIndex + OneByte;
+ internal const int VariableLengthStartIndex = JobPartStatusIndex + IntSizeInBytes;
}
internal static class ErrorCode
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/Shared/DataMovementExtensions.cs b/sdk/storage/Azure.Storage.DataMovement/src/Shared/DataMovementExtensions.cs
index de00ca6dbce4c..ad752a7d8a33c 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/Shared/DataMovementExtensions.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/Shared/DataMovementExtensions.cs
@@ -29,7 +29,7 @@ public static async Task ToJobPartAsync(
JobPartPlanHeader header = JobPartPlanHeader.Deserialize(planFileStream);
// Apply credentials to the saved transfer job path
- DataTransferStatus jobPartStatus = header.AtomicJobStatus;
+ DataTransferStatus jobPartStatus = header.JobPartStatus;
StreamToUriJobPart jobPart = await StreamToUriJobPart.CreateJobPartAsync(
job: baseJob,
partNumber: Convert.ToInt32(header.PartNumber),
@@ -54,7 +54,7 @@ public static async Task ToJobPartAsync(
JobPartPlanHeader header = JobPartPlanHeader.Deserialize(planFileStream);
// Apply credentials to the saved transfer job path
- DataTransferStatus jobPartStatus = header.AtomicJobStatus;
+ DataTransferStatus jobPartStatus = header.JobPartStatus;
ServiceToServiceJobPart jobPart = await ServiceToServiceJobPart.CreateJobPartAsync(
job: baseJob,
partNumber: Convert.ToInt32(header.PartNumber),
@@ -79,7 +79,7 @@ public static async Task ToJobPartAsync(
JobPartPlanHeader header = JobPartPlanHeader.Deserialize(planFileStream);
// Apply credentials to the saved transfer job path
- DataTransferStatus jobPartStatus = header.AtomicJobStatus;
+ DataTransferStatus jobPartStatus = header.JobPartStatus;
UriToStreamJobPart jobPart = await UriToStreamJobPart.CreateJobPartAsync(
job: baseJob,
partNumber: Convert.ToInt32(header.PartNumber),
@@ -108,7 +108,7 @@ public static async Task ToJobPartAsync(
string childSourceName = childSourcePath.Substring(sourceResource.Uri.AbsoluteUri.Length + 1);
string childDestinationPath = header.DestinationPath;
string childDestinationName = childDestinationPath.Substring(destinationResource.Uri.AbsoluteUri.Length + 1);
- DataTransferStatus jobPartStatus = header.AtomicJobStatus;
+ DataTransferStatus jobPartStatus = header.JobPartStatus;
StreamToUriJobPart jobPart = await StreamToUriJobPart.CreateJobPartAsync(
job: baseJob,
partNumber: Convert.ToInt32(header.PartNumber),
@@ -135,7 +135,7 @@ public static async Task ToJobPartAsync(
// Apply credentials to the saved transfer job path
string childSourcePath = header.SourcePath;
string childDestinationPath = header.DestinationPath;
- DataTransferStatus jobPartStatus = header.AtomicJobStatus;
+ DataTransferStatus jobPartStatus = header.JobPartStatus;
ServiceToServiceJobPart jobPart = await ServiceToServiceJobPart.CreateJobPartAsync(
job: baseJob,
partNumber: Convert.ToInt32(header.PartNumber),
@@ -164,7 +164,7 @@ public static async Task ToJobPartAsync(
string childSourceName = childSourcePath.Substring(sourceResource.Uri.AbsoluteUri.Length + 1);
string childDestinationPath = header.DestinationPath;
string childDestinationName = childDestinationPath.Substring(destinationResource.Uri.AbsoluteUri.Length + 1);
- DataTransferStatus jobPartStatus = header.AtomicJobStatus;
+ DataTransferStatus jobPartStatus = header.JobPartStatus;
UriToStreamJobPart jobPart = await UriToStreamJobPart.CreateJobPartAsync(
job: baseJob,
partNumber: Convert.ToInt32(header.PartNumber),
@@ -182,65 +182,25 @@ public static async Task ToJobPartAsync(
///
/// Translate the initial job part header to a job plan format file
///
- internal static JobPartPlanHeader ToJobPartPlanHeader(this JobPartInternal jobPart, DataTransferStatus jobStatus)
+ internal static JobPartPlanHeader ToJobPartPlanHeader(this JobPartInternal jobPart)
{
- JobPartPlanDestinationBlob dstBlobData = new JobPartPlanDestinationBlob(
- blobType: JobPlanBlobType.Detect, // TODO: update when supported
- noGuessMimeType: false, // TODO: update when supported
- contentType: "", // TODO: update when supported
- contentEncoding: "", // TODO: update when supported
- contentLanguage: "", // TODO: update when supported
- contentDisposition: "", // TODO: update when supported
- cacheControl: "", // TODO: update when supported
- blockBlobTier: JobPartPlanBlockBlobTier.None,// TODO: update when supported
- pageBlobTier: JobPartPlanPageBlobTier.None,// TODO: update when supported
- putMd5: false,// TODO: update when supported
- metadata: "",// TODO: update when supported
- blobTags: "",// TODO: update when supported
- isSourceEncrypted: false,// TODO: update when supported
- cpkScopeInfo: "",// TODO: update when supported
- blockSize: jobPart._maximumTransferChunkSize);
-
- JobPartPlanDestinationLocal dstLocalData = new JobPartPlanDestinationLocal(
- preserveLastModifiedTime: false, // TODO: update when supported
- checksumVerificationOption: 0); // TODO: update when supported
-
string sourcePath = jobPart._sourceResource.Uri.ToSanitizedString();
string destinationPath = jobPart._destinationResource.Uri.ToSanitizedString();
return new JobPartPlanHeader(
version: DataMovementConstants.JobPartPlanFile.SchemaVersion,
- startTime: DateTimeOffset.UtcNow, // TODO: update to job start time
transferId: jobPart._dataTransfer.Id,
- partNumber: (uint)jobPart.PartNumber,
- sourceResourceId: jobPart._sourceResource.ResourceId,
+ partNumber: jobPart.PartNumber,
+ createTime: DateTimeOffset.UtcNow,
+ sourceTypeId: jobPart._sourceResource.ResourceId,
+ destinationTypeId: jobPart._destinationResource.ResourceId,
sourcePath: sourcePath,
- sourceExtraQuery: "", // TODO: convert options to string
- destinationResourceId: jobPart._destinationResource.ResourceId,
destinationPath: destinationPath,
- destinationExtraQuery: "", // TODO: convert options to string
- isFinalPart: false,
- forceWrite: jobPart._createMode == StorageResourceCreationPreference.OverwriteIfExists, // TODO: change to enum value
- forceIfReadOnly: false, // TODO: revisit for Azure Files
- autoDecompress: false, // TODO: revisit if we want to support this feature
+ overwrite: jobPart._createMode == StorageResourceCreationPreference.OverwriteIfExists,
+ initialTransferSize: jobPart._initialTransferSize,
+ chunkSize: jobPart._maximumTransferChunkSize,
priority: 0, // TODO: add priority feature
- ttlAfterCompletion: DateTimeOffset.MinValue, // TODO: revisit for Azure Files
- jobPlanOperation: 0, // TODO: revisit when we add this feature
- folderPropertyMode: FolderPropertiesMode.None, // TODO: revisit for Azure Files
- numberChunks: 0, // TODO: revisit when added
- dstBlobData: dstBlobData, // TODO: revisit when we add feature to cache this info
- dstLocalData: dstLocalData, // TODO: revisit when we add feature to cache this info
- preserveSMBPermissions: false, // TODO: revisit for Azure Files
- preserveSMBInfo: false, // TODO: revisit for Azure Files
- s2sGetPropertiesInBackend: false, // TODO: revisit for Azure Files
- s2sSourceChangeValidation: false, // TODO: revisit for Azure Files
- destLengthValidation: false, // TODO: revisit when features is added
- s2sInvalidMetadataHandleOption: 0, // TODO: revisit when supported
- deleteSnapshotsOption: JobPartDeleteSnapshotsOption.None, // TODO: revisit when feature is added
- permanentDeleteOption: JobPartPermanentDeleteOption.None, // TODO: revisit when feature is added
- rehydratePriorityType: JobPartPlanRehydratePriorityType.None, // TODO: revisit when feature is added
- atomicJobStatus: jobStatus,
- atomicPartStatus: jobPart.JobPartStatus);
+ jobPartStatus: jobPart.JobPartStatus);
}
///
@@ -278,10 +238,10 @@ internal static void VerifyJobPartPlanHeader(this JobPartInternal jobPart, JobPa
}
// Check CreateMode / Overwrite
- if ((header.ForceWrite && jobPart._createMode != StorageResourceCreationPreference.OverwriteIfExists) ||
- (!header.ForceWrite && jobPart._createMode == StorageResourceCreationPreference.OverwriteIfExists))
+ if ((header.Overwrite && jobPart._createMode != StorageResourceCreationPreference.OverwriteIfExists) ||
+ (!header.Overwrite && jobPart._createMode == StorageResourceCreationPreference.OverwriteIfExists))
{
- throw Errors.MismatchResumeCreateMode(header.ForceWrite, jobPart._createMode);
+ throw Errors.MismatchResumeCreateMode(header.Overwrite, jobPart._createMode);
}
}
}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/Shared/Errors.DataMovement.cs b/sdk/storage/Azure.Storage.DataMovement/src/Shared/Errors.DataMovement.cs
index 69a9fd5bf724c..8795df72decf9 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/Shared/Errors.DataMovement.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/Shared/Errors.DataMovement.cs
@@ -58,6 +58,9 @@ public static ArgumentException CollisionJobPart(string transferId, int jobPart)
public static ArgumentException MissingCheckpointerPath(string directoryPath)
=> throw new ArgumentException($"Could not initialize the LocalTransferCheckpointer because the folderPath passed does not exist. Please create the {directoryPath}, folder path first.");
+ public static ArgumentException InvalidJobPartFileName(string fileName)
+ => new ArgumentException($"Invalid Checkpoint File: The following checkpoint file contains an invalid file name {fileName}");
+
public static ArgumentException InvalidTransferIdFileName(string fileName)
=> new ArgumentException($"Invalid Checkpoint File: The following checkpoint file contains a Transfer ID that is invalid {fileName}");
@@ -67,14 +70,14 @@ public static ArgumentException InvalidJobPartFileNameExtension(string fileName)
public static ArgumentException InvalidJobPartNumberFileName(string fileName)
=> new ArgumentException($"Invalid Job Part Plan File: The following Job Part Plan file contains an invalid Job Part Number, could not convert to a integer: {fileName}");
- public static ArgumentException InvalidSchemaVersionFileName(string schemaVersion)
- => new ArgumentException($"Invalid Job Part Plan File: Job Part Schema version: {schemaVersion} does not match the Schema Version supported by the package: {DataMovementConstants.JobPartPlanFile.SchemaVersion}. Please consider altering the package version that supports the respective version.");
+ public static ArgumentException InvalidPartHeaderElementLength(string elementName, int expectedSize, int actualSize)
+ => new ArgumentException($"Invalid Job Part Plan File: Attempt to set element, \"{elementName}\" failed.\n Expected size: {expectedSize}\n Actual Size: {actualSize}");
- public static ArgumentException InvalidPlanFileElement(string elementName, int expectedSize, int actualSize)
- => throw new ArgumentException($"Invalid Job Part Plan File: Attempt to set element, \"{elementName}\" failed.\n Expected size: {expectedSize}\n Actual Size: {actualSize}");
+ public static ArgumentException InvalidPartHeaderElement(string elementName, string elementValue)
+ => new ArgumentException($"Invalid Job Part Plan File: Attempt to set element, \"{elementName}\" with value \"{elementValue}\" failed.");
public static ArgumentException InvalidStringToDictionary(string elementName, string value)
- => throw new ArgumentException($"Invalid Job Part Plan File: Attempt to set element, \"{elementName}\" failed.\n Expected format stored was invalid, \"{value}\"");
+ => new ArgumentException($"Invalid Job Part Plan File: Attempt to set element, \"{elementName}\" failed.\n Expected format stored was invalid, \"{value}\"");
public static IOException LocalFileAlreadyExists(string pathName)
=> new IOException($"File path `{pathName}` already exists. Cannot overwrite file.");
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/StreamToUriJobPart.cs b/sdk/storage/Azure.Storage.DataMovement/src/StreamToUriJobPart.cs
index d1cc783dd054b..ef1ce3ad5cd72 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/StreamToUriJobPart.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/StreamToUriJobPart.cs
@@ -89,7 +89,7 @@ public static async Task CreateJobPartAsync(
{
// Create Job Part file as we're initializing the job part
StreamToUriJobPart part = new StreamToUriJobPart(job, partNumber);
- await part.AddJobPartToCheckpointerAsync(1).ConfigureAwait(false); // For now we only store 1 chunk
+ await part.AddJobPartToCheckpointerAsync().ConfigureAwait(false);
return part;
}
@@ -112,7 +112,7 @@ public static async Task CreateJobPartAsync(
length: length);
if (!partPlanFileExists)
{
- await part.AddJobPartToCheckpointerAsync(1).ConfigureAwait(false); // For now we only store 1 chunk
+ await part.AddJobPartToCheckpointerAsync().ConfigureAwait(false);
}
return part;
}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/TransferCheckpointer.cs b/sdk/storage/Azure.Storage.DataMovement/src/TransferCheckpointer.cs
index aaeeed368b9e2..879fcf91bd3b1 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/TransferCheckpointer.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/TransferCheckpointer.cs
@@ -44,7 +44,6 @@ public abstract Task AddNewJobAsync(
///
/// The transfer ID.
/// The job part number.
- /// The total chunks for the part.
/// A to the job part plan header.
///
/// Optional to propagate
@@ -53,7 +52,6 @@ public abstract Task AddNewJobAsync(
public abstract Task AddNewJobPartAsync(
string transferId,
int partNumber,
- int chunksTotal,
Stream headerStream,
CancellationToken cancellationToken = default);
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/UriToStreamJobPart.cs b/sdk/storage/Azure.Storage.DataMovement/src/UriToStreamJobPart.cs
index bc86ff3a50ffd..79dc77ffa5842 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/UriToStreamJobPart.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/UriToStreamJobPart.cs
@@ -85,7 +85,7 @@ public static async Task CreateJobPartAsync(
{
// Create Job Part file as we're initializing the job part
UriToStreamJobPart part = new UriToStreamJobPart(job, partNumber);
- await part.AddJobPartToCheckpointerAsync(1).ConfigureAwait(false); // For now we only store 1 chunk
+ await part.AddJobPartToCheckpointerAsync().ConfigureAwait(false);
return part;
}
@@ -108,7 +108,7 @@ public static async Task CreateJobPartAsync(
length: length);
if (!partPlanFileExists)
{
- await part.AddJobPartToCheckpointerAsync(1).ConfigureAwait(false); // For now we only store 1 chunk
+ await part.AddJobPartToCheckpointerAsync().ConfigureAwait(false);
}
return part;
}
diff --git a/sdk/storage/Azure.Storage.DataMovement/tests/CheckpointerTesting.cs b/sdk/storage/Azure.Storage.DataMovement/tests/CheckpointerTesting.cs
index 2947248c6c5d4..17787870cccdb 100644
--- a/sdk/storage/Azure.Storage.DataMovement/tests/CheckpointerTesting.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/tests/CheckpointerTesting.cs
@@ -2,8 +2,6 @@
// Licensed under the MIT License.
using Azure.Storage.DataMovement.JobPlan;
-using Azure.Storage.Test;
-using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using System;
@@ -11,191 +9,83 @@
namespace Azure.Storage.DataMovement.Tests
{
- internal class CheckpointerTesting
+ internal static class CheckpointerTesting
{
- private const int KB = 1024;
- private const int MB = 1024 * KB;
- internal const string DefaultTransferId =
- "c591bacc-5552-4c5c-b068-552685ec5cd5";
+ internal const string DefaultTransferId = "c591bacc-5552-4c5c-b068-552685ec5cd5";
internal const long DefaultPartNumber = 5;
- internal static readonly DateTimeOffset DefaultStartTime
- = new DateTimeOffset(2023, 03, 13, 15, 24, 6, default);
internal const string DefaultSourceProviderId = "test";
- internal const string DefaultSourceResourceId = "LocalFile";
+ internal const string DefaultSourceTypeId = "LocalFile";
internal const string DefaultSourcePath = "C:/sample-source";
internal const string DefaultWebSourcePath = "https://example.com/source";
- internal const string DefaultSourceQuery = "sourcequery";
internal const string DefaultDestinationProviderId = "test";
- internal const string DefaultDestinationResourceId = "LocalFile";
+ internal const string DefaultDestinationTypeId = "BlockBlob";
internal const string DefaultDestinationPath = "C:/sample-destination";
internal const string DefaultWebDestinationPath = "https://example.com/destination";
- internal const string DefaultDestinationQuery = "destquery";
+ internal const long DefaultInitialTransferSize = 32 * Constants.MB;
+ internal const long DefaultChunkSize = 4 * Constants.MB;
internal const byte DefaultPriority = 0;
- internal static readonly DateTimeOffset DefaultTtlAfterCompletion = DateTimeOffset.MaxValue;
internal const JobPlanOperation DefaultJobPlanOperation = JobPlanOperation.Upload;
- internal const FolderPropertiesMode DefaultFolderPropertiesMode = FolderPropertiesMode.None;
- internal const long DefaultNumberChunks = 1;
- internal const JobPlanBlobType DefaultBlobType = JobPlanBlobType.BlockBlob;
- internal const string DefaultContentType = "ContentType / type";
- internal const string DefaultContentEncoding = "UTF8";
- internal const string DefaultContentLanguage = "content-language";
- internal const string DefaultContentDisposition = "content-disposition";
- internal const string DefaultCacheControl = "cache-control";
- internal const JobPartPlanBlockBlobTier DefaultBlockBlobTier = JobPartPlanBlockBlobTier.None;
- internal const JobPartPlanPageBlobTier DefaultPageBlobTier = JobPartPlanPageBlobTier.None;
- internal const string DefaultCpkScopeInfo = "cpk-scope-info";
- internal const long DefaultBlockSize = 4 * KB;
+ internal const long DefaultBlockSize = 4 * Constants.KB;
internal const byte DefaultS2sInvalidMetadataHandleOption = 0;
internal const byte DefaultChecksumVerificationOption = 0;
- internal const JobPartDeleteSnapshotsOption DefaultDeleteSnapshotsOption = JobPartDeleteSnapshotsOption.None;
- internal const JobPartPermanentDeleteOption DefaultPermanentDeleteOption = JobPartPermanentDeleteOption.None;
- internal const JobPartPlanRehydratePriorityType DefaultRehydratePriorityType = JobPartPlanRehydratePriorityType.None;
+ internal static readonly DateTimeOffset DefaultCreateTime = new DateTimeOffset(2023, 08, 28, 17, 26, 0, default);
internal static readonly DataTransferStatus DefaultJobStatus = new DataTransferStatus(DataTransferState.Queued, false, false);
internal static readonly DataTransferStatus DefaultPartStatus = new DataTransferStatus(DataTransferState.Queued, false, false);
- internal static readonly DateTimeOffset DefaultCreateTime = new DateTimeOffset(2023, 08, 28, 17, 26, 0, default);
internal static JobPartPlanHeader CreateDefaultJobPartHeader(
string version = DataMovementConstants.JobPartPlanFile.SchemaVersion,
- DateTimeOffset startTime = default,
string transferId = DefaultTransferId,
long partNumber = DefaultPartNumber,
- string sourceResourceId = DefaultSourceResourceId,
+ DateTimeOffset createTime = default,
+ string sourceTypeId = DefaultSourceTypeId,
+ string destinationTypeId = DefaultDestinationTypeId,
string sourcePath = DefaultSourcePath,
- string sourceExtraQuery = DefaultSourceQuery,
- string destinationResourceId = DefaultDestinationResourceId,
string destinationPath = DefaultDestinationPath,
- string destinationExtraQuery = DefaultDestinationQuery,
- bool isFinalPart = false,
- bool forceWrite = false,
- bool forceIfReadOnly = false,
- bool autoDecompress = false,
+ bool overwrite = false,
+ long initialTransferSize = DefaultInitialTransferSize,
+ long chunkSize = DefaultChunkSize,
byte priority = DefaultPriority,
- DateTimeOffset ttlAfterCompletion = default,
- JobPlanOperation fromTo = DefaultJobPlanOperation,
- FolderPropertiesMode folderPropertyMode = DefaultFolderPropertiesMode,
- long numberChunks = DefaultNumberChunks,
- JobPlanBlobType blobType = DefaultBlobType,
- bool noGuessMimeType = false,
- string contentType = DefaultContentType,
- string contentEncoding = DefaultContentEncoding,
- string contentLanguage = DefaultContentLanguage,
- string contentDisposition = DefaultContentDisposition,
- string cacheControl = DefaultCacheControl,
- JobPartPlanBlockBlobTier blockBlobTier = DefaultBlockBlobTier,
- JobPartPlanPageBlobTier pageBlobTier = DefaultPageBlobTier,
- bool putMd5 = false,
- IDictionary metadata = default,
- IDictionary blobTags = default,
- bool isSourceEncrypted = false,
- string cpkScopeInfo = DefaultCpkScopeInfo,
- long blockSize = DefaultBlockSize,
- bool preserveLastModifiedTime = false,
- byte checksumVerificationOption = DefaultChecksumVerificationOption,
- bool preserveSMBPermissions = false,
- bool preserveSMBInfo = false,
- bool s2sGetPropertiesInBackend = false,
- bool s2sSourceChangeValidation = false,
- bool destLengthValidation = false,
- byte s2sInvalidMetadataHandleOption = DefaultS2sInvalidMetadataHandleOption,
- JobPartDeleteSnapshotsOption deleteSnapshotsOption = DefaultDeleteSnapshotsOption,
- JobPartPermanentDeleteOption permanentDeleteOption = DefaultPermanentDeleteOption,
- JobPartPlanRehydratePriorityType rehydratePriorityType = DefaultRehydratePriorityType,
- DataTransferStatus atomicJobStatus = default,
- DataTransferStatus atomicPartStatus = default)
+ DataTransferStatus jobPartStatus = default)
{
- if (startTime == default)
- {
- startTime = DefaultStartTime;
- }
- if (ttlAfterCompletion == default)
+ if (createTime == default)
{
- ttlAfterCompletion = DefaultTtlAfterCompletion;
+ createTime = DefaultCreateTime;
}
- metadata ??= DataProvider.BuildMetadata();
- blobTags ??= DataProvider.BuildTags();
- atomicJobStatus ??= DefaultJobStatus;
- atomicPartStatus ??= DefaultPartStatus;
-
- JobPartPlanDestinationBlob dstBlobData = new JobPartPlanDestinationBlob(
- blobType: blobType,
- noGuessMimeType: noGuessMimeType,
- contentType: contentType,
- contentEncoding: contentEncoding,
- contentLanguage: contentLanguage,
- contentDisposition: contentDisposition,
- cacheControl: cacheControl,
- blockBlobTier: blockBlobTier,
- pageBlobTier: pageBlobTier,
- putMd5: putMd5,
- metadata: metadata,
- blobTags: blobTags,
- isSourceEncrypted: isSourceEncrypted,
- cpkScopeInfo: cpkScopeInfo,
- blockSize: blockSize);
-
- JobPartPlanDestinationLocal dstLocalData = new JobPartPlanDestinationLocal(
- preserveLastModifiedTime: preserveLastModifiedTime,
- checksumVerificationOption: checksumVerificationOption);
+ jobPartStatus ??= DefaultPartStatus;
return new JobPartPlanHeader(
- version: version,
- startTime: startTime,
- transferId: transferId,
- partNumber: partNumber,
- sourceResourceId: sourceResourceId,
- sourcePath: sourcePath,
- sourceExtraQuery: sourceExtraQuery,
- destinationResourceId: destinationResourceId,
- destinationPath: destinationPath,
- destinationExtraQuery: destinationExtraQuery,
- isFinalPart: isFinalPart,
- forceWrite: forceWrite,
- forceIfReadOnly: forceIfReadOnly,
- autoDecompress: autoDecompress,
- priority: priority,
- ttlAfterCompletion: ttlAfterCompletion,
- jobPlanOperation: fromTo,
- folderPropertyMode: folderPropertyMode,
- numberChunks: numberChunks,
- dstBlobData: dstBlobData,
- dstLocalData: dstLocalData,
- preserveSMBPermissions: preserveSMBPermissions,
- preserveSMBInfo: preserveSMBInfo,
- s2sGetPropertiesInBackend: s2sGetPropertiesInBackend,
- s2sSourceChangeValidation: s2sSourceChangeValidation,
- destLengthValidation: destLengthValidation,
- s2sInvalidMetadataHandleOption: s2sInvalidMetadataHandleOption,
- deleteSnapshotsOption: deleteSnapshotsOption,
- permanentDeleteOption: permanentDeleteOption,
- rehydratePriorityType: rehydratePriorityType,
- atomicJobStatus: atomicJobStatus,
- atomicPartStatus: atomicPartStatus);
+ version,
+ transferId,
+ partNumber,
+ createTime,
+ sourceTypeId,
+ destinationTypeId,
+ sourcePath,
+ destinationPath,
+ overwrite,
+ initialTransferSize,
+ chunkSize,
+ priority,
+ jobPartStatus);
}
- internal static async Task AssertJobPlanHeaderAsync(JobPartPlanHeader header, Stream stream)
+ internal static async Task AssertJobPlanHeaderAsync(
+ this TransferCheckpointer checkpointer,
+ string transferId,
+ int partNumber,
+ JobPartPlanHeader expectedHeader)
{
- int headerSize = DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes;
- using var originalHeaderStream = new MemoryStream(headerSize);
- header.Serialize(originalHeaderStream);
- originalHeaderStream.Seek(0, SeekOrigin.Begin);
- stream.Seek(0, SeekOrigin.Begin);
-
- for (var i = 0; i < headerSize; i += (int)DefaultBlockSize * 5 / 2)
+ JobPartPlanHeader actualHeader;
+ using (Stream actualStream = await checkpointer.ReadJobPartPlanFileAsync(
+ transferId: transferId,
+ partNumber: partNumber,
+ offset: 0,
+ length: 0)) // Read whole file
{
- var startIndex = i;
- var count = Math.Min((int)DefaultBlockSize, (int)(headerSize - startIndex));
-
- var buffer = new byte[count];
- var actual = new byte[count];
- stream.Seek(i, SeekOrigin.Begin);
- originalHeaderStream.Seek(i, SeekOrigin.Begin);
- await stream.ReadAsync(buffer, 0, count);
- await originalHeaderStream.ReadAsync(actual, 0, count);
-
- CollectionAssert.AreEqual(
- actual,
- buffer);
+ actualHeader = JobPartPlanHeader.Deserialize(actualStream);
}
+
+ Assert.That(actualHeader.Equals(expectedHeader));
}
}
}
diff --git a/sdk/storage/Azure.Storage.DataMovement/tests/JobPartPlanFileNameTests.cs b/sdk/storage/Azure.Storage.DataMovement/tests/JobPartPlanFileNameTests.cs
index 4d08a6904208c..af1352f5e7fdd 100644
--- a/sdk/storage/Azure.Storage.DataMovement/tests/JobPartPlanFileNameTests.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/tests/JobPartPlanFileNameTests.cs
@@ -11,8 +11,6 @@ namespace Azure.Storage.DataMovement.Tests
{
public class JobPartPlanFileNameTests
{
- private string schemaVersion = DataMovementConstants.JobPartPlanFile.SchemaVersion;
-
public JobPartPlanFileNameTests()
{
}
@@ -20,120 +18,99 @@ public JobPartPlanFileNameTests()
[Test]
public void Ctor()
{
- // "12345678-1234-1234-1234-123456789abc--001.steV01"
+ // "12345678-1234-1234-1234-123456789abc.00001.ndmpart"
// Transfer Id: 12345678-1234-1234-1234-123456789abc
- // Part Num: 001
- JobPartPlanFileName jobFileName = new JobPartPlanFileName($"12345678-1234-1234-1234-123456789abc--00001.steV{schemaVersion}");
+ // Part Num: 1
+ JobPartPlanFileName jobFileName = new JobPartPlanFileName($"12345678-1234-1234-1234-123456789abc.00001.ndmpart");
Assert.AreEqual("", jobFileName.PrefixPath);
Assert.AreEqual("12345678-1234-1234-1234-123456789abc", jobFileName.Id);
Assert.AreEqual(1, jobFileName.JobPartNumber);
- Assert.AreEqual(schemaVersion, jobFileName.SchemaVersion);
- // "randomtransferidthataddsupto36charac--jobpart.steV01"
+ // "randomtransferidthataddsupto36charac.jobpart.ndmpart"
// Transfer Id: randomtransferidthataddsupto36charac
- // Part Num: 001
- JobPartPlanFileName jobFileName2 = new JobPartPlanFileName($"randomtransferidthataddsupto36charac--00001.steV{schemaVersion}");
+ // Part Num: 210
+ JobPartPlanFileName jobFileName2 = new JobPartPlanFileName($"randomtransferidthataddsupto36charac.00210.ndmpart");
Assert.AreEqual("", jobFileName.PrefixPath);
Assert.AreEqual("randomtransferidthataddsupto36charac", jobFileName2.Id);
- Assert.AreEqual(1, jobFileName2.JobPartNumber);
- Assert.AreEqual(schemaVersion, jobFileName2.SchemaVersion);
-
- // "abcdefgh-abcd-abcd-abcd-123456789abc.steV02"
- // Transfer Id: abcdefgh-abcd-abcd-abcd-123456789abc
- // Part Num: 210
- JobPartPlanFileName jobFileName3 = new JobPartPlanFileName($"abcdefgh-abcd-abcd-abcd-123456789abc--00210.steV{schemaVersion}");
-
- Assert.AreEqual("", jobFileName.PrefixPath);
- Assert.AreEqual("abcdefgh-abcd-abcd-abcd-123456789abc", jobFileName3.Id);
- Assert.AreEqual(210, jobFileName3.JobPartNumber);
- Assert.AreEqual(schemaVersion, jobFileName3.SchemaVersion);
+ Assert.AreEqual(210, jobFileName2.JobPartNumber);
}
[Test]
public void Ctor_FullPath()
{
- // "12345678-1234-1234-1234-123456789abc--00001.steV01"
+ // "12345678-1234-1234-1234-123456789abc.00001.ndmpart"
// Transfer Id: 12345678-1234-1234-1234-123456789abc
- // Part Num: 001
+ // Part Num: 1
string tempPath = Path.GetTempPath().TrimEnd(Path.DirectorySeparatorChar);
- string pathName1 = Path.Combine(tempPath, $"12345678-1234-1234-1234-123456789abc--00001.steV{schemaVersion}");
+ string pathName1 = Path.Combine(tempPath, $"12345678-1234-1234-1234-123456789abc.00001.ndmpart");
JobPartPlanFileName jobFileName = new JobPartPlanFileName(pathName1);
Assert.AreEqual(tempPath, jobFileName.PrefixPath);
Assert.AreEqual("12345678-1234-1234-1234-123456789abc", jobFileName.Id);
Assert.AreEqual(1, jobFileName.JobPartNumber);
- Assert.AreEqual(schemaVersion, jobFileName.SchemaVersion);
- // "randomtransferidthataddsupto36charac--00001.steV01"
+ // "randomtransferidthataddsupto36charac.00001.ndmpart"
// Transfer Id: randomtransferidthataddsupto36charac
- // Part Num: 001
- string pathName2 = Path.Combine(tempPath, $"randomtransferidthataddsupto36charac--00001.steV{schemaVersion}");
+ // Part Num: 1
+ string pathName2 = Path.Combine(tempPath, $"randomtransferidthataddsupto36charac.00001.ndmpart");
JobPartPlanFileName jobFileName2 = new JobPartPlanFileName(pathName2);
Assert.AreEqual(tempPath, jobFileName2.PrefixPath);
Assert.AreEqual("randomtransferidthataddsupto36charac", jobFileName2.Id);
Assert.AreEqual(1, jobFileName2.JobPartNumber);
- Assert.AreEqual(schemaVersion, jobFileName2.SchemaVersion);
- // "abcdefgh-abcd-abcd-abcd-123456789abc--00210.steV02"
+ // "abcdefgh-abcd-abcd-abcd-123456789abc.00210.ndmpart"
// Transfer Id: abcdefgh-abcd-abcd-abcd-123456789abc
// Part Num: 210
string prefixPath3 = Path.Combine("folder", "sub");
- string pathName3 = Path.Combine(prefixPath3, $"abcdefgh-abcd-abcd-abcd-123456789abc--00210.steV{schemaVersion}");
+ string pathName3 = Path.Combine(prefixPath3, $"abcdefgh-abcd-abcd-abcd-123456789abc.00210.ndmpart");
JobPartPlanFileName jobFileName3 = new JobPartPlanFileName(pathName3);
Assert.AreEqual(prefixPath3, jobFileName3.PrefixPath);
Assert.AreEqual("abcdefgh-abcd-abcd-abcd-123456789abc", jobFileName3.Id);
Assert.AreEqual(210, jobFileName3.JobPartNumber);
- Assert.AreEqual(schemaVersion, jobFileName3.SchemaVersion);
}
[Test]
public void Ctor_Divided()
{
- // "12345678-1234-1234-1234-123456789abc--001.steV01"
+ // "12345678-1234-1234-1234-123456789abc.001.ndmpart"
// Transfer Id: 12345678-1234-1234-1234-123456789abc
// Part Num: 001
JobPartPlanFileName jobFileName = new JobPartPlanFileName(
checkpointerPath: "C:\\folder\\subfolder",
id: "12345678-1234-1234-1234-123456789abc",
- jobPartNumber: 1,
- schemaVersion: schemaVersion);
+ jobPartNumber: 1);
Assert.AreEqual("C:\\folder\\subfolder", jobFileName.PrefixPath);
Assert.AreEqual("12345678-1234-1234-1234-123456789abc", jobFileName.Id);
Assert.AreEqual(1, jobFileName.JobPartNumber);
- Assert.AreEqual(schemaVersion, jobFileName.SchemaVersion);
- // "randomtransferidthataddsupto36charac--jobpart.steV01"
+ // "randomtransferidthataddsupto36charac.jobpart.ndmpart"
// Transfer Id: randomtransferidthataddsupto36charac
- // Part Num: 001
+ // Part Num: 1
JobPartPlanFileName jobFileName2 = new JobPartPlanFileName(
checkpointerPath: "F:\\folder\\foo",
id: "randomtransferidthataddsupto36charac",
- jobPartNumber: 1,
- schemaVersion: schemaVersion);
+ jobPartNumber: 1);
Assert.AreEqual("F:\\folder\\foo", jobFileName2.PrefixPath);
Assert.AreEqual("randomtransferidthataddsupto36charac", jobFileName2.Id);
Assert.AreEqual(1, jobFileName2.JobPartNumber);
- Assert.AreEqual(schemaVersion, jobFileName2.SchemaVersion);
- // "abcdefgh-abcd-abcd-abcd-123456789abc.steV02"
+ // "abcdefgh-abcd-abcd-abcd-123456789abc.00210.ndmpart"
// Transfer Id: abcdefgh-abcd-abcd-abcd-123456789abc
// Part Num: 210
JobPartPlanFileName jobFileName3 = new JobPartPlanFileName(
checkpointerPath: "\\folder\\sub",
id: "abcdefgh-abcd-abcd-abcd-123456789abc",
- jobPartNumber: 210,
- schemaVersion: schemaVersion);
+ jobPartNumber: 210);
Assert.AreEqual("\\folder\\sub", jobFileName3.PrefixPath);
Assert.AreEqual("abcdefgh-abcd-abcd-abcd-123456789abc", jobFileName3.Id);
Assert.AreEqual(210, jobFileName3.JobPartNumber);
- Assert.AreEqual(schemaVersion, jobFileName3.SchemaVersion);
}
[Test]
@@ -152,56 +129,47 @@ public void Ctor_Error()
e => e.Message.Contains("Invalid Job Part Plan File"));
TestHelper.AssertExpectedException(
- () => new JobPartPlanFileName("invalidJobId--001.steV01"),
+ () => new JobPartPlanFileName("invalidJobId.001.ndmpart"),
e => e.Message.Contains("Invalid Checkpoint File"));
TestHelper.AssertExpectedException(
- () => new JobPartPlanFileName("abcdefgh-abcd-abcd-abcd-123456789abc--XY.steV01"),
+ () => new JobPartPlanFileName("abcdefgh-abcd-abcd-abcd-123456789abc.XY.ndmpart"),
e => e.Message.Contains("Invalid Job Part Plan File"));
TestHelper.AssertExpectedException(
- () => new JobPartPlanFileName("abcdefgh-abcd-abcd-abcd-123456789abc--001.txt"),
+ () => new JobPartPlanFileName("abcdefgh-abcd-abcd-abcd-123456789abc.001.txt"),
e => e.Message.Contains("Invalid Job Part Plan File"));
}
[Test]
public void ToStringTest()
{
- // "12345678-1234-1234-1234-123456789abc--001.steV01"
- string originalPath = $"12345678-1234-1234-1234-123456789abc--00001.steV{schemaVersion}";
+ string originalPath = $"12345678-1234-1234-1234-123456789abc.00001.ndmpart";
JobPartPlanFileName jobFileName = new JobPartPlanFileName(originalPath);
Assert.AreEqual(originalPath, jobFileName.ToString());
- // "randomtransferidthataddsupto36charac--jobpart.steV01"
- string originalPath2 = $"randomtransferidthataddsupto36charac--00001.steV{schemaVersion}";
+ string originalPath2 = $"randomtransferidthataddsupto36charac.00210.ndmpart";
JobPartPlanFileName jobFileName2 = new JobPartPlanFileName(originalPath2);
Assert.AreEqual(originalPath2, jobFileName2.ToString());
-
- // "abcdefgh-abcd-abcd-abcd-123456789abc.steV02"
- string originalPath3 = $"abcdefgh-abcd-abcd-abcd-123456789abc--00210.steV{schemaVersion}";
- JobPartPlanFileName jobFileName3 = new JobPartPlanFileName(originalPath3);
- Assert.AreEqual(originalPath3, jobFileName3.ToString());
}
[Test]
public void ToString_FullPath()
{
- // "C:/folder/subfolder/12345678-1234-1234-1234-123456789abc--00001.steV01"
+ // "C:/folder/subfolder/12345678-1234-1234-1234-123456789abc.00001.ndmpart"
string tempPath = Path.GetTempPath().TrimEnd(Path.DirectorySeparatorChar);
- string originalPath = Path.Combine(tempPath, $"12345678-1234-1234-1234-123456789abc--00001.steV{schemaVersion}");
+ string originalPath = Path.Combine(tempPath, $"12345678-1234-1234-1234-123456789abc.00001.ndmpart");
JobPartPlanFileName jobFileName = new JobPartPlanFileName(originalPath);
Assert.AreEqual(originalPath, jobFileName.ToString());
- // "F:/folder/foo/randomtransferidthataddsupto36charac--00001.steV01"
- string originalPath2 = Path.Combine(tempPath, $"randomtransferidthataddsupto36charac--00001.steV{schemaVersion}");
+ // "F:/folder/foo/randomtransferidthataddsupto36charac.00001.ndmpart"
+ string originalPath2 = Path.Combine(tempPath, $"randomtransferidthataddsupto36charac.00001.ndmpart");
JobPartPlanFileName jobFileName2 = new JobPartPlanFileName(originalPath2);
Assert.AreEqual(originalPath2, jobFileName2.ToString());
- // "/folder/sub/abcdefgh-abcd-abcd-abcd-123456789abc--00210.steV02"
- // Transfer Id: abcdefgh-abcd-abcd-abcd-123456789abc
- // Part Num: 210
+ // "/folder/sub/abcdefgh-abcd-abcd-abcd-123456789abc.00210.ndmpart"
string prefixPath3 = Path.Combine("folder", "sub");
- string originalPath3 = Path.Combine(prefixPath3, $"abcdefgh-abcd-abcd-abcd-123456789abc--00210.steV{schemaVersion}");
+ string originalPath3 = Path.Combine(prefixPath3, $"abcdefgh-abcd-abcd-abcd-123456789abc.00210.ndmpart");
JobPartPlanFileName jobFileName3 = new JobPartPlanFileName(originalPath3);
Assert.AreEqual(originalPath3, jobFileName3.ToString());
}
@@ -209,35 +177,30 @@ public void ToString_FullPath()
[Test]
public void ToString_Divided()
{
- // "C:/folder/subfolder/12345678-1234-1234-1234-123456789abc--00001.steV01"
+ // "C:/folder/subfolder/12345678-1234-1234-1234-123456789abc.00001.ndmpart"
string tempPath = Path.GetTempPath().TrimEnd(Path.DirectorySeparatorChar);
- string originalPath = Path.Combine(tempPath, $"12345678-1234-1234-1234-123456789abc--00001.steV{schemaVersion}");
+ string originalPath = Path.Combine(tempPath, $"12345678-1234-1234-1234-123456789abc.00001.ndmpart");
JobPartPlanFileName jobFileName = new JobPartPlanFileName(
checkpointerPath: tempPath,
id: "12345678-1234-1234-1234-123456789abc",
- jobPartNumber: 1,
- schemaVersion: schemaVersion);
+ jobPartNumber: 1);
Assert.AreEqual(originalPath, jobFileName.ToString());
- // "F:/folder/foo/randomtransferidthataddsupto36charac--00001.steV01"
- string originalPath2 = Path.Combine(tempPath, $"randomtransferidthataddsupto36charac--00001.steV{schemaVersion}");
+ // "F:/folder/foo/randomtransferidthataddsupto36charac.00001.ndmpart"
+ string originalPath2 = Path.Combine(tempPath, $"randomtransferidthataddsupto36charac.00001.ndmpart");
JobPartPlanFileName jobFileName2 = new JobPartPlanFileName(
checkpointerPath: tempPath,
id: "randomtransferidthataddsupto36charac",
- jobPartNumber: 1,
- schemaVersion: schemaVersion);
+ jobPartNumber: 1);
Assert.AreEqual(originalPath2, jobFileName2.ToString());
- // "/folder/sub/abcdefgh-abcd-abcd-abcd-123456789abc--00210.steV02"
- // Transfer Id: abcdefgh-abcd-abcd-abcd-123456789abc
- // Part Num: 210
+ // "/folder/sub/abcdefgh-abcd-abcd-abcd-123456789abc.00210.ndmpart"
string prefixPath3 = Path.Combine("folder", "sub");
- string originalPath3 = Path.Combine(prefixPath3, $"abcdefgh-abcd-abcd-abcd-123456789abc--00210.steV{schemaVersion}");
+ string originalPath3 = Path.Combine(prefixPath3, $"abcdefgh-abcd-abcd-abcd-123456789abc.00210.ndmpart");
JobPartPlanFileName jobFileName3 = new JobPartPlanFileName(
checkpointerPath: prefixPath3,
id: "abcdefgh-abcd-abcd-abcd-123456789abc",
- jobPartNumber: 210,
- schemaVersion: schemaVersion);
+ jobPartNumber: 210);
Assert.AreEqual(originalPath3, jobFileName3.ToString());
}
}
diff --git a/sdk/storage/Azure.Storage.DataMovement/tests/JobPartPlanHeaderTests.cs b/sdk/storage/Azure.Storage.DataMovement/tests/JobPartPlanHeaderTests.cs
index cd555c0942471..087bcc27f643d 100644
--- a/sdk/storage/Azure.Storage.DataMovement/tests/JobPartPlanHeaderTests.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/tests/JobPartPlanHeaderTests.cs
@@ -2,11 +2,8 @@
// Licensed under the MIT License.
using System;
-using System.Collections.Generic;
using System.IO;
-using System.Threading.Tasks;
using Azure.Storage.DataMovement.JobPlan;
-using Azure.Storage.Test;
using NUnit.Framework;
using static Azure.Storage.DataMovement.Tests.CheckpointerTesting;
@@ -14,17 +11,6 @@ namespace Azure.Storage.DataMovement.Tests
{
public class JobPartPlanHeaderTests : DataMovementTestBase
{
- private static string DictionaryToString(IDictionary dict)
- {
- string concatStr = "";
- foreach (KeyValuePair kv in dict)
- {
- // e.g. store like "header=value;"
- concatStr = string.Concat(concatStr, $"{kv.Key}={kv.Value};");
- }
- return concatStr;
- }
-
public JobPartPlanHeaderTests(bool async) : base(async, default)
{
}
@@ -32,392 +18,42 @@ public JobPartPlanHeaderTests(bool async) : base(async, default)
[Test]
public void Ctor()
{
- IDictionary metadata = DataProvider.BuildMetadata();
- IDictionary blobTags = DataProvider.BuildTags();
-
- JobPartPlanHeader header = CreateDefaultJobPartHeader(
- metadata: metadata,
- blobTags: blobTags);
+ JobPartPlanHeader header = CreateDefaultJobPartHeader();
- Assert.AreEqual(header.Version, DataMovementConstants.JobPartPlanFile.SchemaVersion);
- Assert.AreEqual(header.StartTime, DefaultStartTime);
- Assert.AreEqual(header.TransferId, DefaultTransferId);
- Assert.AreEqual(header.PartNumber, DefaultPartNumber);
- Assert.AreEqual(header.SourceResourceId, DefaultSourceResourceId);
- Assert.AreEqual(header.SourcePath, DefaultSourcePath);
- Assert.AreEqual(header.SourcePathLength, DefaultSourcePath.Length);
- Assert.AreEqual(header.SourceExtraQuery, DefaultSourceQuery);
- Assert.AreEqual(header.SourceExtraQueryLength, DefaultSourceQuery.Length);
- Assert.AreEqual(header.DestinationResourceId, DefaultDestinationResourceId);
- Assert.AreEqual(header.DestinationPath, DefaultDestinationPath);
- Assert.AreEqual(header.DestinationPathLength, DefaultDestinationPath.Length);
- Assert.AreEqual(header.DestinationExtraQuery, DefaultDestinationQuery);
- Assert.AreEqual(header.DestinationExtraQueryLength, DefaultDestinationQuery.Length);
- Assert.IsFalse(header.IsFinalPart);
- Assert.IsFalse(header.ForceWrite);
- Assert.IsFalse(header.ForceIfReadOnly);
- Assert.IsFalse(header.AutoDecompress);
- Assert.AreEqual(header.Priority, DefaultPriority);
- Assert.AreEqual(header.TTLAfterCompletion, DefaultTtlAfterCompletion);
- Assert.AreEqual(header.JobPlanOperation, DefaultJobPlanOperation);
- Assert.AreEqual(header.FolderPropertyMode, DefaultFolderPropertiesMode);
- Assert.AreEqual(header.NumberChunks, DefaultNumberChunks);
- Assert.AreEqual(header.DstBlobData.BlobType, DefaultBlobType);
- Assert.IsFalse(header.DstBlobData.NoGuessMimeType);
- Assert.AreEqual(header.DstBlobData.ContentType, DefaultContentType);
- Assert.AreEqual(header.DstBlobData.ContentTypeLength, DefaultContentType.Length);
- Assert.AreEqual(header.DstBlobData.ContentEncoding, DefaultContentEncoding);
- Assert.AreEqual(header.DstBlobData.ContentEncodingLength, DefaultContentEncoding.Length);
- Assert.AreEqual(header.DstBlobData.ContentLanguage, DefaultContentLanguage);
- Assert.AreEqual(header.DstBlobData.ContentLanguageLength, DefaultContentLanguage.Length);
- Assert.AreEqual(header.DstBlobData.ContentDisposition, DefaultContentDisposition);
- Assert.AreEqual(header.DstBlobData.ContentDispositionLength, DefaultContentDisposition.Length);
- Assert.AreEqual(header.DstBlobData.CacheControl, DefaultCacheControl);
- Assert.AreEqual(header.DstBlobData.CacheControlLength, DefaultCacheControl.Length);
- Assert.AreEqual(header.DstBlobData.BlockBlobTier, DefaultBlockBlobTier);
- Assert.AreEqual(header.DstBlobData.PageBlobTier, DefaultPageBlobTier);
- Assert.IsFalse(header.DstBlobData.PutMd5);
- string metadataStr = DictionaryToString(metadata);
- Assert.AreEqual(header.DstBlobData.Metadata, metadataStr);
- Assert.AreEqual(header.DstBlobData.MetadataLength, metadataStr.Length);
- string blobTagsStr = DictionaryToString(blobTags);
- Assert.AreEqual(header.DstBlobData.BlobTags, blobTagsStr);
- Assert.AreEqual(header.DstBlobData.BlobTagsLength, blobTagsStr.Length);
- Assert.IsFalse(header.DstBlobData.IsSourceEncrypted);
- Assert.AreEqual(header.DstBlobData.CpkScopeInfo, DefaultCpkScopeInfo);
- Assert.AreEqual(header.DstBlobData.CpkScopeInfoLength, DefaultCpkScopeInfo.Length);
- Assert.AreEqual(header.DstBlobData.BlockSize, DefaultBlockSize);
- Assert.IsFalse(header.DstLocalData.PreserveLastModifiedTime);
- Assert.AreEqual(header.DstLocalData.ChecksumVerificationOption, DefaultChecksumVerificationOption);
- Assert.IsFalse(header.PreserveSMBPermissions);
- Assert.IsFalse(header.PreserveSMBInfo);
- Assert.IsFalse(header.S2SGetPropertiesInBackend);
- Assert.IsFalse(header.S2SSourceChangeValidation);
- Assert.IsFalse(header.DestLengthValidation);
- Assert.AreEqual(header.S2SInvalidMetadataHandleOption, DefaultS2sInvalidMetadataHandleOption);
- Assert.AreEqual(header.DeleteSnapshotsOption, DefaultDeleteSnapshotsOption);
- Assert.AreEqual(header.PermanentDeleteOption, DefaultPermanentDeleteOption);
- Assert.AreEqual(header.RehydratePriorityType, DefaultRehydratePriorityType);
- Assert.AreEqual(header.AtomicJobStatus, DefaultJobStatus);
- Assert.AreEqual(header.AtomicPartStatus, DefaultPartStatus);
+ Assert.AreEqual(DataMovementConstants.JobPartPlanFile.SchemaVersion, header.Version);
+ Assert.AreEqual(DefaultTransferId, header.TransferId);
+ Assert.AreEqual(DefaultPartNumber, header.PartNumber);
+ Assert.AreEqual(DefaultCreateTime, header.CreateTime);
+ Assert.AreEqual(DefaultSourceTypeId, header.SourceTypeId);
+ Assert.AreEqual(DefaultDestinationTypeId, header.DestinationTypeId);
+ Assert.AreEqual(DefaultSourcePath, header.SourcePath);
+ Assert.AreEqual(DefaultDestinationPath, header.DestinationPath);
+ Assert.IsFalse(header.Overwrite);
+ Assert.AreEqual(DefaultInitialTransferSize, header.InitialTransferSize);
+ Assert.AreEqual(DefaultChunkSize, header.ChunkSize);
+ Assert.AreEqual(DefaultPriority, header.Priority);
+ Assert.AreEqual(DefaultPartStatus, header.JobPartStatus);
}
[Test]
- public async Task Serialize()
+ public void Serialize()
{
// Arrange
- IDictionary metadata = DataProvider.BuildMetadata();
- IDictionary blobTags = DataProvider.BuildTags();
-
- JobPartPlanHeader header = CreateDefaultJobPartHeader(
- metadata: metadata,
- blobTags: blobTags);
+ JobPartPlanHeader header = CreateDefaultJobPartHeader();
+ string samplePath = Path.Combine("Resources", "SampleJobPartPlanFile.b3.ndmpart");
- using (Stream stream = new MemoryStream(DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
+ using (MemoryStream headerStream = new MemoryStream())
+ using (FileStream fileStream = File.OpenRead(samplePath))
{
// Act
- header.Serialize(stream);
+ header.Serialize(headerStream);
// Assert
- stream.Position = 0;
-
- int versionSize = DataMovementConstants.JobPartPlanFile.VersionStrNumBytes;
- byte[] versionBuffer = new byte[versionSize];
- await stream.ReadAsync(versionBuffer, 0, versionSize);
- Assert.AreEqual(DataMovementConstants.JobPartPlanFile.SchemaVersion.ToByteArray(versionSize), versionBuffer);
-
- int startTimeSize = DataMovementConstants.LongSizeInBytes;
- byte[] startTimeBuffer = new byte[startTimeSize];
- await stream.ReadAsync(startTimeBuffer, 0, startTimeSize);
- Assert.AreEqual(DefaultStartTime.Ticks.ToByteArray(startTimeSize), startTimeBuffer);
-
- int transferIdSize = DataMovementConstants.JobPartPlanFile.TransferIdStrNumBytes;
- byte[] transferIdBuffer = new byte[transferIdSize];
- await stream.ReadAsync(transferIdBuffer, 0, transferIdSize);
- Assert.AreEqual(DefaultTransferId.ToByteArray(transferIdSize), transferIdBuffer);
-
- int partNumberSize = DataMovementConstants.LongSizeInBytes;
- byte[] partNumberBuffer = new byte[partNumberSize];
- await stream.ReadAsync(partNumberBuffer, 0, partNumberSize);
- Assert.AreEqual(DefaultPartNumber.ToByteArray(partNumberSize), partNumberBuffer);
-
- int sourceResourceIdLengthSize = DataMovementConstants.UShortSizeInBytes;
- byte[] sourceResourceIdLengthBuffer = new byte[sourceResourceIdLengthSize];
- await stream.ReadAsync(sourceResourceIdLengthBuffer, 0, sourceResourceIdLengthSize);
- Assert.AreEqual(((ushort)DefaultSourceResourceId.Length).ToByteArray(sourceResourceIdLengthSize), sourceResourceIdLengthBuffer);
-
- int sourceResourceIdSize = DataMovementConstants.JobPartPlanFile.ResourceIdNumBytes;
- byte[] sourceResourceIdBuffer = new byte[sourceResourceIdSize];
- await stream.ReadAsync(sourceResourceIdBuffer, 0, sourceResourceIdSize);
- Assert.AreEqual(DefaultSourceResourceId.ToByteArray(sourceResourceIdSize), sourceResourceIdBuffer);
-
- int sourcePathLengthSize = DataMovementConstants.UShortSizeInBytes;
- byte[] sourcePathLengthBuffer = new byte[sourcePathLengthSize];
- await stream.ReadAsync(sourcePathLengthBuffer, 0, sourcePathLengthSize);
- Assert.AreEqual(((ushort)DefaultSourcePath.Length).ToByteArray(sourcePathLengthSize), sourcePathLengthBuffer);
-
- int sourcePathSize = DataMovementConstants.JobPartPlanFile.PathStrNumBytes;
- byte[] sourcePathBuffer = new byte[sourcePathSize];
- await stream.ReadAsync(sourcePathBuffer, 0, sourcePathSize);
- Assert.AreEqual(DefaultSourcePath.ToByteArray(sourcePathSize), sourcePathBuffer);
-
- int sourceExtraQueryLengthSize = DataMovementConstants.UShortSizeInBytes;
- byte[] sourceExtraQueryLengthBuffer = new byte[sourceExtraQueryLengthSize];
- await stream.ReadAsync(sourceExtraQueryLengthBuffer, 0, sourceExtraQueryLengthSize);
- Assert.AreEqual(((ushort)DefaultSourceQuery.Length).ToByteArray(sourceExtraQueryLengthSize), sourceExtraQueryLengthBuffer);
-
- int sourceExtraQuerySize = DataMovementConstants.JobPartPlanFile.ExtraQueryNumBytes;
- byte[] sourceExtraQueryBuffer = new byte[sourceExtraQuerySize];
- await stream.ReadAsync(sourceExtraQueryBuffer, 0, sourceExtraQuerySize);
- Assert.AreEqual(DefaultSourceQuery.ToByteArray(sourceExtraQuerySize), sourceExtraQueryBuffer);
-
- int destinationResourceIdLengthSize = DataMovementConstants.UShortSizeInBytes;
- byte[] destinationResourceIdLengthBuffer = new byte[destinationResourceIdLengthSize];
- await stream.ReadAsync(destinationResourceIdLengthBuffer, 0, destinationResourceIdLengthSize);
- Assert.AreEqual(((ushort)DefaultDestinationResourceId.Length).ToByteArray(destinationResourceIdLengthSize), destinationResourceIdLengthBuffer);
-
- int destinationResourceIdSize = DataMovementConstants.JobPartPlanFile.ResourceIdNumBytes;
- byte[] destinationResourceIdBuffer = new byte[destinationResourceIdSize];
- await stream.ReadAsync(destinationResourceIdBuffer, 0, destinationResourceIdSize);
- Assert.AreEqual(DefaultDestinationResourceId.ToByteArray(destinationResourceIdSize), destinationResourceIdBuffer);
-
- int destinationPathLengthSize = DataMovementConstants.UShortSizeInBytes;
- byte[] destinationPathLengthBuffer = new byte[destinationPathLengthSize];
- await stream.ReadAsync(destinationPathLengthBuffer, 0, destinationPathLengthSize);
- Assert.AreEqual(((ushort)DefaultDestinationPath.Length).ToByteArray(destinationPathLengthSize), destinationPathLengthBuffer);
-
- int destinationPathSize = DataMovementConstants.JobPartPlanFile.PathStrNumBytes;
- byte[] destinationPathBuffer = new byte[destinationPathSize];
- await stream.ReadAsync(destinationPathBuffer, 0, destinationPathSize);
- Assert.AreEqual(DefaultDestinationPath.ToByteArray(destinationPathSize), destinationPathBuffer);
-
- int destinationExtraQueryLengthSize = DataMovementConstants.UShortSizeInBytes;
- byte[] destinationExtraQueryLengthBuffer = new byte[destinationExtraQueryLengthSize];
- await stream.ReadAsync(destinationExtraQueryLengthBuffer, 0, destinationExtraQueryLengthSize);
- Assert.AreEqual(((ushort)DefaultDestinationQuery.Length).ToByteArray(destinationExtraQueryLengthSize), destinationExtraQueryLengthBuffer);
-
- int destinationExtraQuerySize = DataMovementConstants.JobPartPlanFile.ExtraQueryNumBytes;
- byte[] destinationExtraQueryBuffer = new byte[destinationExtraQuerySize];
- await stream.ReadAsync(destinationExtraQueryBuffer, 0, destinationExtraQuerySize);
- Assert.AreEqual(DefaultDestinationQuery.ToByteArray(destinationExtraQuerySize), destinationExtraQueryBuffer);
-
- int oneByte = DataMovementConstants.OneByte;
- byte[] isFinalPartBuffer = new byte[oneByte];
- await stream.ReadAsync(isFinalPartBuffer, 0, oneByte);
- Assert.AreEqual(0, isFinalPartBuffer[0]);
-
- byte[] forceWriteBuffer = new byte[oneByte];
- await stream.ReadAsync(forceWriteBuffer, 0, forceWriteBuffer.Length);
- Assert.AreEqual(0, forceWriteBuffer[0]);
-
- byte[] forceIfReadOnlyBuffer = new byte[oneByte];
- await stream.ReadAsync(forceIfReadOnlyBuffer, 0, oneByte);
- Assert.AreEqual(0, forceIfReadOnlyBuffer[0]);
-
- byte[] autoDecompressBuffer = new byte[oneByte];
- await stream.ReadAsync(autoDecompressBuffer, 0, oneByte);
- Assert.AreEqual(0, autoDecompressBuffer[0]);
-
- byte[] priorityBuffer = new byte[oneByte];
- await stream.ReadAsync(priorityBuffer, 0, oneByte);
- Assert.AreEqual(0, priorityBuffer[0]);
-
- int ttlAfterCompletionSize = DataMovementConstants.LongSizeInBytes;
- byte[] ttlAfterCompletionBuffer = new byte[ttlAfterCompletionSize];
- await stream.ReadAsync(ttlAfterCompletionBuffer, 0, ttlAfterCompletionSize);
- Assert.AreEqual(DefaultTtlAfterCompletion.Ticks.ToByteArray(ttlAfterCompletionSize), ttlAfterCompletionBuffer);
-
- byte[] fromToBuffer = new byte[oneByte];
- await stream.ReadAsync(fromToBuffer, 0, oneByte);
- Assert.AreEqual((byte)DefaultJobPlanOperation, fromToBuffer[0]);
-
- byte[] folderPropertyModeBuffer = new byte[oneByte];
- await stream.ReadAsync(folderPropertyModeBuffer, 0, oneByte);
- Assert.AreEqual((byte)DefaultFolderPropertiesMode, folderPropertyModeBuffer[0]);
-
- int numberChunksSize = DataMovementConstants.LongSizeInBytes;
- byte[] numberChunksBuffer = new byte[numberChunksSize];
- await stream.ReadAsync(numberChunksBuffer, 0, numberChunksSize);
- Assert.AreEqual(DefaultNumberChunks.ToByteArray(numberChunksSize), numberChunksBuffer);
-
- byte[] blobTypeBuffer = new byte[oneByte];
- await stream.ReadAsync(blobTypeBuffer, 0, oneByte);
- Assert.AreEqual((byte)DefaultBlobType, blobTypeBuffer[0]);
-
- byte[] noGuessMimeTypeBuffer = new byte[oneByte];
- await stream.ReadAsync(noGuessMimeTypeBuffer, 0, oneByte);
- Assert.AreEqual(0, noGuessMimeTypeBuffer[0]);
-
- int contentTypeLengthSize = DataMovementConstants.UShortSizeInBytes;
- byte[] contentTypeLengthBuffer = new byte[contentTypeLengthSize];
- await stream.ReadAsync(contentTypeLengthBuffer, 0, contentTypeLengthSize);
- Assert.AreEqual(((ushort)DefaultContentType.Length).ToByteArray(contentTypeLengthSize), contentTypeLengthBuffer);
-
- int contentTypeSize = DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes;
- byte[] contentTypeBuffer = new byte[contentTypeSize];
- await stream.ReadAsync(contentTypeBuffer, 0, contentTypeSize);
- Assert.AreEqual(DefaultContentType.ToByteArray(contentTypeSize), contentTypeBuffer);
-
- int contentEncodingLengthSize = DataMovementConstants.UShortSizeInBytes;
- byte[] contentEncodingLengthBuffer = new byte[contentEncodingLengthSize];
- await stream.ReadAsync(contentEncodingLengthBuffer, 0, contentEncodingLengthSize);
- Assert.AreEqual(((ushort)DefaultContentEncoding.Length).ToByteArray(contentEncodingLengthSize), contentEncodingLengthBuffer);
+ BinaryReader reader = new(fileStream);
+ byte[] expected = reader.ReadBytes((int)fileStream.Length);
+ byte[] actual = headerStream.ToArray();
- int contentEncodingSize = DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes;
- byte[] contentEncodingBuffer = new byte[contentEncodingSize];
- await stream.ReadAsync(contentEncodingBuffer, 0, contentEncodingSize);
- Assert.AreEqual(DefaultContentEncoding.ToByteArray(contentEncodingSize), contentEncodingBuffer);
-
- int contentLanguageLengthSize = DataMovementConstants.UShortSizeInBytes;
- byte[] contentLanguageLengthBuffer = new byte[contentLanguageLengthSize];
- await stream.ReadAsync(contentLanguageLengthBuffer, 0, contentLanguageLengthSize);
- Assert.AreEqual(((ushort)DefaultContentLanguage.Length).ToByteArray(contentLanguageLengthSize), contentLanguageLengthBuffer);
-
- int contentLanguageSize = DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes;
- byte[] contentLanguageBuffer = new byte[contentLanguageSize];
- await stream.ReadAsync(contentLanguageBuffer, 0, contentLanguageSize);
- Assert.AreEqual(DefaultContentLanguage.ToByteArray(contentLanguageSize), contentLanguageBuffer);
-
- int contentDispositionLengthSize = DataMovementConstants.UShortSizeInBytes;
- byte[] contentDispositionLengthBuffer = new byte[contentDispositionLengthSize];
- await stream.ReadAsync(contentDispositionLengthBuffer, 0, contentDispositionLengthSize);
- Assert.AreEqual(((ushort)DefaultContentDisposition.Length).ToByteArray(contentDispositionLengthSize), contentDispositionLengthBuffer);
-
- int contentDispositionSize = DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes;
- byte[] contentDispositionBuffer = new byte[contentDispositionSize];
- await stream.ReadAsync(contentDispositionBuffer, 0, contentDispositionSize);
- Assert.AreEqual(DefaultContentDisposition.ToByteArray(contentDispositionSize), contentDispositionBuffer);
-
- int cacheControlLengthSize = DataMovementConstants.UShortSizeInBytes;
- byte[] cacheControlLengthBuffer = new byte[cacheControlLengthSize];
- await stream.ReadAsync(cacheControlLengthBuffer, 0, cacheControlLengthSize);
- Assert.AreEqual(((ushort)DefaultCacheControl.Length).ToByteArray(cacheControlLengthSize), cacheControlLengthBuffer);
-
- int cacheControlSize = DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes;
- byte[] cacheControlBuffer = new byte[cacheControlSize];
- await stream.ReadAsync(cacheControlBuffer, 0, cacheControlSize);
- Assert.AreEqual(DefaultCacheControl.ToByteArray(cacheControlSize), cacheControlBuffer);
-
- byte[] blockBlobTierBuffer = new byte[oneByte];
- await stream.ReadAsync(blockBlobTierBuffer, 0, oneByte);
- Assert.AreEqual((byte)DefaultBlockBlobTier, blockBlobTierBuffer[0]);
-
- byte[] pageBlobTierBuffer = new byte[oneByte];
- await stream.ReadAsync(pageBlobTierBuffer, 0, oneByte);
- Assert.AreEqual((byte)DefaultPageBlobTier, pageBlobTierBuffer[0]);
-
- byte[] putMd5Buffer = new byte[oneByte];
- await stream.ReadAsync(putMd5Buffer, 0, oneByte);
- Assert.AreEqual(0, putMd5Buffer[0]);
-
- string metadataStr = DictionaryToString(metadata);
- int metadataLengthSize = DataMovementConstants.UShortSizeInBytes;
- byte[] metadataLengthBuffer = new byte[metadataLengthSize];
- await stream.ReadAsync(metadataLengthBuffer, 0, metadataLengthSize);
- Assert.AreEqual(((ushort)metadataStr.Length).ToByteArray(metadataLengthSize), metadataLengthBuffer);
-
- int metadataSize = DataMovementConstants.JobPartPlanFile.MetadataStrNumBytes;
- byte[] metadataBuffer = new byte[metadataSize];
- await stream.ReadAsync(metadataBuffer, 0, metadataSize);
- Assert.AreEqual(metadataStr.ToByteArray(metadataSize), metadataBuffer);
-
- string blobTagsStr = DictionaryToString(blobTags);
- int blobTagsLengthSize = DataMovementConstants.LongSizeInBytes;
- byte[] blobTagsLengthBuffer = new byte[blobTagsLengthSize];
- await stream.ReadAsync(blobTagsLengthBuffer, 0, blobTagsLengthSize);
- Assert.AreEqual(((long)blobTagsStr.Length).ToByteArray(blobTagsLengthSize), blobTagsLengthBuffer);
-
- int blobTagsSize = DataMovementConstants.JobPartPlanFile.BlobTagsStrNumBytes;
- byte[] blobTagsBuffer = new byte[blobTagsSize];
- await stream.ReadAsync(blobTagsBuffer, 0, blobTagsSize);
- Assert.AreEqual(blobTagsStr.ToByteArray(blobTagsSize), blobTagsBuffer);
-
- byte[] isSourceEncryptedBuffer = new byte[oneByte];
- await stream.ReadAsync(isSourceEncryptedBuffer, 0, oneByte);
- Assert.AreEqual(0, isSourceEncryptedBuffer[0]);
-
- int cpkScopeInfoLengthSize = DataMovementConstants.UShortSizeInBytes;
- byte[] cpkScopeInfoLengthBuffer = new byte[cpkScopeInfoLengthSize];
- await stream.ReadAsync(cpkScopeInfoLengthBuffer, 0, cpkScopeInfoLengthSize);
- Assert.AreEqual(((ushort)DefaultCpkScopeInfo.Length).ToByteArray(cpkScopeInfoLengthSize), cpkScopeInfoLengthBuffer);
-
- int cpkScopeInfoSize = DataMovementConstants.JobPartPlanFile.HeaderValueNumBytes;
- byte[] cpkScopeInfoBuffer = new byte[cpkScopeInfoSize];
- await stream.ReadAsync(cpkScopeInfoBuffer, 0, cpkScopeInfoSize);
- Assert.AreEqual(DefaultCpkScopeInfo.ToByteArray(cpkScopeInfoSize), cpkScopeInfoBuffer);
-
- int blockSizeLengthSize = DataMovementConstants.LongSizeInBytes;
- byte[] blockSizeLengthBuffer = new byte[blockSizeLengthSize];
- await stream.ReadAsync(blockSizeLengthBuffer, 0, blockSizeLengthSize);
- Assert.AreEqual(DefaultBlockSize.ToByteArray(blockSizeLengthSize), blockSizeLengthBuffer);
-
- byte[] preserveLastModifiedTimeBuffer = new byte[oneByte];
- await stream.ReadAsync(preserveLastModifiedTimeBuffer, 0, oneByte);
- Assert.AreEqual(0, preserveLastModifiedTimeBuffer[0]);
-
- byte[] checksumVerificationOptionBuffer = new byte[oneByte];
- await stream.ReadAsync(checksumVerificationOptionBuffer, 0, oneByte);
- Assert.AreEqual(DefaultChecksumVerificationOption, checksumVerificationOptionBuffer[0]);
-
- byte[] preserveSMBPermissionsBuffer = new byte[oneByte];
- await stream.ReadAsync(preserveSMBPermissionsBuffer, 0, oneByte);
- Assert.AreEqual(0, preserveSMBPermissionsBuffer[0]);
-
- byte[] preserveSMBInfoBuffer = new byte[oneByte];
- await stream.ReadAsync(preserveSMBInfoBuffer, 0, oneByte);
- Assert.AreEqual(0, preserveSMBInfoBuffer[0]);
-
- byte[] s2sGetPropertiesInBackendBuffer = new byte[oneByte];
- await stream.ReadAsync(s2sGetPropertiesInBackendBuffer, 0, oneByte);
- Assert.AreEqual(0, s2sGetPropertiesInBackendBuffer[0]);
-
- byte[] s2sSourceChangeValidationBuffer = new byte[oneByte];
- await stream.ReadAsync(s2sSourceChangeValidationBuffer, 0, oneByte);
- Assert.AreEqual(0, s2sSourceChangeValidationBuffer[0]);
-
- byte[] destLengthValidationBuffer = new byte[oneByte];
- await stream.ReadAsync(destLengthValidationBuffer, 0, oneByte);
- Assert.AreEqual(0, destLengthValidationBuffer[0]);
-
- byte[] s2sInvalidMetadataHandleOptionBuffer = new byte[oneByte];
- await stream.ReadAsync(s2sInvalidMetadataHandleOptionBuffer, 0, oneByte);
- Assert.AreEqual(DefaultS2sInvalidMetadataHandleOption, s2sInvalidMetadataHandleOptionBuffer[0]);
-
- byte[] deleteSnapshotsOptionBuffer = new byte[oneByte];
- await stream.ReadAsync(deleteSnapshotsOptionBuffer, 0, oneByte);
- Assert.AreEqual((byte)DefaultDeleteSnapshotsOption, deleteSnapshotsOptionBuffer[0]);
-
- byte[] permanentDeleteOptionBuffer = new byte[oneByte];
- await stream.ReadAsync(permanentDeleteOptionBuffer, 0, oneByte);
- Assert.AreEqual((byte)DefaultPermanentDeleteOption, permanentDeleteOptionBuffer[0]);
-
- byte[] rehydratePriorityTypeBuffer = new byte[oneByte];
- await stream.ReadAsync(rehydratePriorityTypeBuffer, 0, oneByte);
- Assert.AreEqual((byte)DefaultRehydratePriorityType, rehydratePriorityTypeBuffer[0]);
-
- byte[] atomicJobStateBuffer = new byte[oneByte];
- await stream.ReadAsync(atomicJobStateBuffer, 0, oneByte);
- Assert.AreEqual((byte)DefaultJobStatus.State, atomicJobStateBuffer[0]);
-
- byte[] atomicJobHasFailedItemsBuffer = new byte[oneByte];
- await stream.ReadAsync(atomicJobHasFailedItemsBuffer, 0, oneByte);
- Assert.AreEqual(Convert.ToByte(DefaultJobStatus.HasFailedItems), atomicJobHasFailedItemsBuffer[0]);
-
- byte[] atomicJobHasSkippedItemsBuffer = new byte[oneByte];
- await stream.ReadAsync(atomicJobHasSkippedItemsBuffer, 0, oneByte);
- Assert.AreEqual(Convert.ToByte(DefaultJobStatus.HasSkippedItems), atomicJobHasSkippedItemsBuffer[0]);
-
- byte[] atomicPartStateBuffer = new byte[oneByte];
- await stream.ReadAsync(atomicPartStateBuffer, 0, oneByte);
- Assert.AreEqual((byte)DefaultPartStatus.State, atomicPartStateBuffer[0]);
-
- byte[] atomiPartHasFailedItemsBuffer = new byte[oneByte];
- await stream.ReadAsync(atomiPartHasFailedItemsBuffer, 0, oneByte);
- Assert.AreEqual(Convert.ToByte(DefaultJobStatus.HasFailedItems), atomiPartHasFailedItemsBuffer[0]);
-
- byte[] atomicPartHasSkippedItemsBuffer = new byte[oneByte];
- await stream.ReadAsync(atomicPartHasSkippedItemsBuffer, 0, oneByte);
- Assert.AreEqual(Convert.ToByte(DefaultJobStatus.HasSkippedItems), atomicPartHasSkippedItemsBuffer[0]);
+ CollectionAssert.AreEqual(expected, actual);
}
}
@@ -434,22 +70,16 @@ public void Serialize_Error()
}
[Test]
- public void Deserialize()
+ public void Deserialize()
{
- // Arrange
- IDictionary metadata = DataProvider.BuildMetadata();
- IDictionary blobTags = DataProvider.BuildTags();
-
- JobPartPlanHeader header = CreateDefaultJobPartHeader(
- metadata: metadata,
- blobTags: blobTags);
+ JobPartPlanHeader header = CreateDefaultJobPartHeader();
- using (Stream stream = new MemoryStream(DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
+ using (Stream stream = new MemoryStream())
{
header.Serialize(stream);
// Act / Assert
- DeserializeAndVerify(stream, DataMovementConstants.JobPartPlanFile.SchemaVersion, metadata, blobTags);
+ DeserializeAndVerify(stream, DataMovementConstants.JobPartPlanFile.SchemaVersion);
}
}
@@ -485,97 +115,42 @@ public void Deserialize_File_Version_b2()
public void Deserialize_File_Version_b3()
{
// Arrange
- string samplePath = Path.Combine("Resources", "SampleJobPartPlanFile.steVb3");
+ string samplePath = Path.Combine("Resources", "SampleJobPartPlanFile.b3.ndmpart");
using (FileStream stream = File.OpenRead(samplePath))
{
// Act / Assert
- DeserializeAndVerify(stream, DataMovementConstants.JobPartPlanFile.SchemaVersion_b3, DataProvider.BuildMetadata(), DataProvider.BuildTags());
+ DeserializeAndVerify(stream, DataMovementConstants.JobPartPlanFile.SchemaVersion_b3);
}
}
[Test]
public void Deserialize_Error()
{
- // Arrange
- JobPartPlanHeader header = CreateDefaultJobPartHeader();
-
// Act / Assert
Assert.Catch(
() => JobPartPlanHeader.Deserialize(default));
}
private void DeserializeAndVerify(
- Stream stream,
- string schemaVersion,
- IDictionary metadata,
- IDictionary blobTags)
+ Stream stream,
+ string schemaVersion)
{
JobPartPlanHeader deserializedHeader = JobPartPlanHeader.Deserialize(stream);
// Assert
- Assert.AreEqual(deserializedHeader.Version, schemaVersion);
- Assert.AreEqual(deserializedHeader.StartTime, DefaultStartTime);
- Assert.AreEqual(deserializedHeader.TransferId, DefaultTransferId);
- Assert.AreEqual(deserializedHeader.PartNumber, DefaultPartNumber);
- Assert.AreEqual(deserializedHeader.SourcePath, DefaultSourcePath);
- Assert.AreEqual(deserializedHeader.SourcePathLength, DefaultSourcePath.Length);
- Assert.AreEqual(deserializedHeader.SourceExtraQuery, DefaultSourceQuery);
- Assert.AreEqual(deserializedHeader.SourceExtraQueryLength, DefaultSourceQuery.Length);
- Assert.AreEqual(deserializedHeader.DestinationPath, DefaultDestinationPath);
- Assert.AreEqual(deserializedHeader.DestinationPathLength, DefaultDestinationPath.Length);
- Assert.AreEqual(deserializedHeader.DestinationExtraQuery, DefaultDestinationQuery);
- Assert.AreEqual(deserializedHeader.DestinationExtraQueryLength, DefaultDestinationQuery.Length);
- Assert.IsFalse(deserializedHeader.IsFinalPart);
- Assert.IsFalse(deserializedHeader.ForceWrite);
- Assert.IsFalse(deserializedHeader.ForceIfReadOnly);
- Assert.IsFalse(deserializedHeader.AutoDecompress);
- Assert.AreEqual(deserializedHeader.Priority, DefaultPriority);
- Assert.AreEqual(deserializedHeader.TTLAfterCompletion, DefaultTtlAfterCompletion);
- Assert.AreEqual(deserializedHeader.JobPlanOperation, DefaultJobPlanOperation);
- Assert.AreEqual(deserializedHeader.FolderPropertyMode, DefaultFolderPropertiesMode);
- Assert.AreEqual(deserializedHeader.NumberChunks, DefaultNumberChunks);
- Assert.AreEqual(deserializedHeader.DstBlobData.BlobType, DefaultBlobType);
- Assert.IsFalse(deserializedHeader.DstBlobData.NoGuessMimeType);
- Assert.AreEqual(deserializedHeader.DstBlobData.ContentType, DefaultContentType);
- Assert.AreEqual(deserializedHeader.DstBlobData.ContentTypeLength, DefaultContentType.Length);
- Assert.AreEqual(deserializedHeader.DstBlobData.ContentEncoding, DefaultContentEncoding);
- Assert.AreEqual(deserializedHeader.DstBlobData.ContentEncodingLength, DefaultContentEncoding.Length);
- Assert.AreEqual(deserializedHeader.DstBlobData.ContentLanguage, DefaultContentLanguage);
- Assert.AreEqual(deserializedHeader.DstBlobData.ContentLanguageLength, DefaultContentLanguage.Length);
- Assert.AreEqual(deserializedHeader.DstBlobData.ContentDisposition, DefaultContentDisposition);
- Assert.AreEqual(deserializedHeader.DstBlobData.ContentDispositionLength, DefaultContentDisposition.Length);
- Assert.AreEqual(deserializedHeader.DstBlobData.CacheControl, DefaultCacheControl);
- Assert.AreEqual(deserializedHeader.DstBlobData.CacheControlLength, DefaultCacheControl.Length);
- Assert.AreEqual(deserializedHeader.DstBlobData.BlockBlobTier, DefaultBlockBlobTier);
- Assert.AreEqual(deserializedHeader.DstBlobData.PageBlobTier, DefaultPageBlobTier);
- Assert.IsFalse(deserializedHeader.DstBlobData.PutMd5);
- string metadataStr = DictionaryToString(metadata);
- Assert.AreEqual(deserializedHeader.DstBlobData.Metadata, metadataStr);
- Assert.AreEqual(deserializedHeader.DstBlobData.MetadataLength, metadataStr.Length);
- string blobTagsStr = DictionaryToString(blobTags);
- Assert.AreEqual(deserializedHeader.DstBlobData.BlobTags, blobTagsStr);
- Assert.AreEqual(deserializedHeader.DstBlobData.BlobTagsLength, blobTagsStr.Length);
- Assert.IsFalse(deserializedHeader.DstBlobData.IsSourceEncrypted);
- Assert.AreEqual(deserializedHeader.DstBlobData.CpkScopeInfo, DefaultCpkScopeInfo);
- Assert.AreEqual(deserializedHeader.DstBlobData.CpkScopeInfoLength, DefaultCpkScopeInfo.Length);
- Assert.AreEqual(deserializedHeader.DstBlobData.BlockSize, DefaultBlockSize);
- Assert.IsFalse(deserializedHeader.DstLocalData.PreserveLastModifiedTime);
- Assert.AreEqual(deserializedHeader.DstLocalData.ChecksumVerificationOption, DefaultChecksumVerificationOption);
- Assert.IsFalse(deserializedHeader.PreserveSMBPermissions);
- Assert.IsFalse(deserializedHeader.PreserveSMBInfo);
- Assert.IsFalse(deserializedHeader.S2SGetPropertiesInBackend);
- Assert.IsFalse(deserializedHeader.S2SSourceChangeValidation);
- Assert.IsFalse(deserializedHeader.DestLengthValidation);
- Assert.AreEqual(deserializedHeader.S2SInvalidMetadataHandleOption, DefaultS2sInvalidMetadataHandleOption);
- Assert.AreEqual(deserializedHeader.DeleteSnapshotsOption, DefaultDeleteSnapshotsOption);
- Assert.AreEqual(deserializedHeader.PermanentDeleteOption, DefaultPermanentDeleteOption);
- Assert.AreEqual(deserializedHeader.RehydratePriorityType, DefaultRehydratePriorityType);
- Assert.AreEqual(DefaultJobStatus.State, deserializedHeader.AtomicJobStatus.State);
- Assert.AreEqual(DefaultJobStatus.HasFailedItems, deserializedHeader.AtomicJobStatus.HasFailedItems);
- Assert.AreEqual(DefaultJobStatus.HasSkippedItems, deserializedHeader.AtomicJobStatus.HasSkippedItems);
- Assert.AreEqual(DefaultPartStatus.State, deserializedHeader.AtomicPartStatus.State);
- Assert.AreEqual(DefaultPartStatus.HasFailedItems, deserializedHeader.AtomicPartStatus.HasFailedItems);
- Assert.AreEqual(DefaultPartStatus.HasSkippedItems, deserializedHeader.AtomicPartStatus.HasSkippedItems);
+ Assert.AreEqual(schemaVersion, deserializedHeader.Version);
+ Assert.AreEqual(DefaultTransferId, deserializedHeader.TransferId);
+ Assert.AreEqual(DefaultPartNumber, deserializedHeader.PartNumber);
+ Assert.AreEqual(DefaultCreateTime, deserializedHeader.CreateTime);
+ Assert.AreEqual(DefaultSourceTypeId, deserializedHeader.SourceTypeId);
+ Assert.AreEqual(DefaultDestinationTypeId, deserializedHeader.DestinationTypeId);
+ Assert.AreEqual(DefaultSourcePath, deserializedHeader.SourcePath);
+ Assert.AreEqual(DefaultDestinationPath, deserializedHeader.DestinationPath);
+ Assert.IsFalse(deserializedHeader.Overwrite);
+ Assert.AreEqual(DefaultInitialTransferSize, deserializedHeader.InitialTransferSize);
+ Assert.AreEqual(DefaultChunkSize, deserializedHeader.ChunkSize);
+ Assert.AreEqual(DefaultPriority, deserializedHeader.Priority);
+ Assert.AreEqual(DefaultPartStatus, deserializedHeader.JobPartStatus);
}
}
}
diff --git a/sdk/storage/Azure.Storage.DataMovement/tests/JobPlanFileTests.cs b/sdk/storage/Azure.Storage.DataMovement/tests/JobPlanFileTests.cs
index 0f88510834411..f1f1955c15b78 100644
--- a/sdk/storage/Azure.Storage.DataMovement/tests/JobPlanFileTests.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/tests/JobPlanFileTests.cs
@@ -32,7 +32,7 @@ public async Task CreateJobPlanFileAsync()
id: transferId,
headerStream: stream);
}
- string filePath = Path.Combine(test.DirectoryPath, $"{transferId}.{DataMovementConstants.JobPlanFile.FileExtension}");
+ string filePath = Path.Combine(test.DirectoryPath, $"{transferId}{DataMovementConstants.JobPlanFile.FileExtension}");
Assert.NotNull(file);
Assert.AreEqual(transferId, file.Id);
@@ -46,7 +46,7 @@ public async Task LoadExistingJobPlanFile()
string transferId = GetNewTransferId();
// Setup existing job plan file
- string filePath = Path.Combine(test.DirectoryPath, $"{transferId}.{DataMovementConstants.JobPlanFile.FileExtension}");
+ string filePath = Path.Combine(test.DirectoryPath, $"{transferId}{DataMovementConstants.JobPlanFile.FileExtension}");
var data = Encoding.UTF8.GetBytes("Hello World!");
using (FileStream fileStream = File.OpenWrite(filePath))
{
diff --git a/sdk/storage/Azure.Storage.DataMovement/tests/LocalTransferCheckpointerFactory.cs b/sdk/storage/Azure.Storage.DataMovement/tests/LocalTransferCheckpointerFactory.cs
index 8d513542bd19c..d3e136a4ed89c 100644
--- a/sdk/storage/Azure.Storage.DataMovement/tests/LocalTransferCheckpointerFactory.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/tests/LocalTransferCheckpointerFactory.cs
@@ -12,40 +12,10 @@ namespace Azure.Storage.DataMovement.Tests
internal class LocalTransferCheckpointerFactory
{
internal const int _partCountDefault = 5;
- internal const string _testTransferId =
- "c591bacc-5552-4c5c-b068-552685ec5cd5";
- internal const long _testPartNumber = 5;
- internal static readonly DateTimeOffset _testStartTime
- = new DateTimeOffset(2023, 03, 13, 15, 24, 6, default);
internal const string _testSourceProviderId = "test";
- internal const string _testSourceResourceId = "LocalFile";
internal const string _testSourcePath = "C:/sample-source";
- internal const string _testSourceQuery = "sourcequery";
internal const string _testDestinationProviderId = "test";
- internal const string _testDestinationResourceId = "LocalFile";
internal const string _testDestinationPath = "C:/sample-destination";
- internal const string _testDestinationQuery = "destquery";
- internal const byte _testPriority = 0;
- internal static readonly DateTimeOffset _testTtlAfterCompletion = DateTimeOffset.MaxValue;
- internal const JobPlanOperation _testJobPlanOperation = JobPlanOperation.Upload;
- internal const FolderPropertiesMode _testFolderPropertiesMode = FolderPropertiesMode.None;
- internal const long _testNumberChunks = 1;
- internal const JobPlanBlobType _testBlobType = JobPlanBlobType.BlockBlob;
- internal const string _testContentType = "ContentType / type";
- internal const string _testContentEncoding = "UTF8";
- internal const string _testContentLanguage = "content-language";
- internal const string _testContentDisposition = "content-disposition";
- internal const string _testCacheControl = "cache-control";
- internal const JobPartPlanBlockBlobTier _testBlockBlobTier = JobPartPlanBlockBlobTier.None;
- internal const JobPartPlanPageBlobTier _testPageBlobTier = JobPartPlanPageBlobTier.None;
- internal const string _testCpkScopeInfo = "cpk-scope-info";
- internal const long _testBlockSize = 4 * Constants.KB;
- internal const byte _testS2sInvalidMetadataHandleOption = 0;
- internal const byte _testChecksumVerificationOption = 0;
- internal const JobPartDeleteSnapshotsOption _testDeleteSnapshotsOption = JobPartDeleteSnapshotsOption.None;
- internal const JobPartPermanentDeleteOption _testPermanentDeleteOption = JobPartPermanentDeleteOption.None;
- internal const JobPartPlanRehydratePriorityType _testRehydratePriorityType = JobPartPlanRehydratePriorityType.None;
- internal static readonly DataTransferStatus _testJobStatus = new DataTransferStatus(DataTransferState.Queued, false, false);
internal static readonly DataTransferStatus _testPartStatus = new DataTransferStatus(DataTransferState.Queued, false, false);
private string _checkpointerPath;
@@ -97,8 +67,8 @@ internal void CreateStubJobPartPlanFilesAsync(
DataTransferStatus status = default,
List sourcePaths = default,
List destinationPaths = default,
- string sourceResourceId = "LocalFile",
- string destinationResourceId = "LocalFile")
+ string sourceTypeId = "LocalFile",
+ string destinationTypeId = "LocalFile")
{
status ??= _testPartStatus;
// Populate sourcePaths if not provided
@@ -126,19 +96,18 @@ internal void CreateStubJobPartPlanFilesAsync(
{
// Populate the JobPlanFile with a pseudo job plan header
- JobPartPlanHeader header = CreateDefaultJobPartHeader(
+ JobPartPlanHeader header = CheckpointerTesting.CreateDefaultJobPartHeader(
transferId: transferId,
partNumber: partNumber,
- sourceResourceId: sourceResourceId,
+ sourceTypeId: sourceTypeId,
+ destinationTypeId: destinationTypeId,
sourcePath: sourcePaths.ElementAt(partNumber),
- destinationResourceId: destinationResourceId,
destinationPath: destinationPaths.ElementAt(partNumber),
- atomicJobStatus: status,
- atomicPartStatus: status);
+ jobPartStatus: status);
JobPartPlanFileName fileName = new JobPartPlanFileName(checkpointerPath, transferId, partNumber);
- using (FileStream stream = File.Create(fileName.FullPath, DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
+ using (FileStream stream = File.Create(fileName.FullPath))
{
header.Serialize(stream);
}
@@ -176,146 +145,13 @@ internal void CreateStubJobPlanFile(
sourceCheckpointData,
destinationCheckpointData);
- string filePath = Path.Combine(checkpointPath, $"{transferId}.{DataMovementConstants.JobPlanFile.FileExtension}");
+ string filePath = Path.Combine(checkpointPath, $"{transferId}{DataMovementConstants.JobPlanFile.FileExtension}");
using (FileStream stream = File.Create(filePath))
{
header.Serialize(stream);
}
}
- internal JobPartPlanHeader CreateDefaultJobPartHeader(
- string version = DataMovementConstants.JobPartPlanFile.SchemaVersion,
- DateTimeOffset startTime = default,
- string transferId = _testTransferId,
- long partNumber = _testPartNumber,
- string sourceResourceId = _testSourceResourceId,
- string sourcePath = _testSourcePath,
- string sourceExtraQuery = _testSourceQuery,
- string destinationResourceId = _testDestinationResourceId,
- string destinationPath = _testDestinationPath,
- string destinationExtraQuery = _testDestinationQuery,
- bool isFinalPart = false,
- bool forceWrite = false,
- bool forceIfReadOnly = false,
- bool autoDecompress = false,
- byte priority = _testPriority,
- DateTimeOffset ttlAfterCompletion = default,
- JobPlanOperation fromTo = _testJobPlanOperation,
- FolderPropertiesMode folderPropertyMode = _testFolderPropertiesMode,
- long numberChunks = _testNumberChunks,
- JobPlanBlobType blobType = _testBlobType,
- bool noGuessMimeType = false,
- string contentType = _testContentType,
- string contentEncoding = _testContentEncoding,
- string contentLanguage = _testContentLanguage,
- string contentDisposition = _testContentDisposition,
- string cacheControl = _testCacheControl,
- JobPartPlanBlockBlobTier blockBlobTier = _testBlockBlobTier,
- JobPartPlanPageBlobTier pageBlobTier = _testPageBlobTier,
- bool putMd5 = false,
- IDictionary metadata = default,
- IDictionary blobTags = default,
- bool isSourceEncrypted = false,
- string cpkScopeInfo = _testCpkScopeInfo,
- long blockSize = _testBlockSize,
- bool preserveLastModifiedTime = false,
- byte checksumVerificationOption = _testChecksumVerificationOption,
- bool preserveSMBPermissions = false,
- bool preserveSMBInfo = false,
- bool s2sGetPropertiesInBackend = false,
- bool s2sSourceChangeValidation = false,
- bool destLengthValidation = false,
- byte s2sInvalidMetadataHandleOption = _testS2sInvalidMetadataHandleOption,
- JobPartDeleteSnapshotsOption deleteSnapshotsOption = _testDeleteSnapshotsOption,
- JobPartPermanentDeleteOption permanentDeleteOption = _testPermanentDeleteOption,
- JobPartPlanRehydratePriorityType rehydratePriorityType = _testRehydratePriorityType,
- DataTransferStatus atomicJobStatus = default,
- DataTransferStatus atomicPartStatus = default)
- {
- atomicJobStatus ??= _testJobStatus;
- atomicPartStatus ??= _testPartStatus;
- if (startTime == default)
- {
- startTime = _testStartTime;
- }
- if (ttlAfterCompletion == default)
- {
- ttlAfterCompletion = _testTtlAfterCompletion;
- }
- metadata ??= BuildMetadata();
- blobTags ??= BuildTags();
-
- JobPartPlanDestinationBlob dstBlobData = new JobPartPlanDestinationBlob(
- blobType: blobType,
- noGuessMimeType: noGuessMimeType,
- contentType: contentType,
- contentEncoding: contentEncoding,
- contentLanguage: contentLanguage,
- contentDisposition: contentDisposition,
- cacheControl: cacheControl,
- blockBlobTier: blockBlobTier,
- pageBlobTier: pageBlobTier,
- putMd5: putMd5,
- metadata: metadata,
- blobTags: blobTags,
- isSourceEncrypted: isSourceEncrypted,
- cpkScopeInfo: cpkScopeInfo,
- blockSize: blockSize);
-
- JobPartPlanDestinationLocal dstLocalData = new JobPartPlanDestinationLocal(
- preserveLastModifiedTime: preserveLastModifiedTime,
- checksumVerificationOption: checksumVerificationOption);
-
- return new JobPartPlanHeader(
- version: version,
- startTime: startTime,
- transferId: transferId,
- partNumber: partNumber,
- sourceResourceId: sourceResourceId,
- sourcePath: sourcePath,
- sourceExtraQuery: sourceExtraQuery,
- destinationResourceId: destinationResourceId,
- destinationPath: destinationPath,
- destinationExtraQuery: destinationExtraQuery,
- isFinalPart: isFinalPart,
- forceWrite: forceWrite,
- forceIfReadOnly: forceIfReadOnly,
- autoDecompress: autoDecompress,
- priority: priority,
- ttlAfterCompletion: ttlAfterCompletion,
- jobPlanOperation: fromTo,
- folderPropertyMode: folderPropertyMode,
- numberChunks: numberChunks,
- dstBlobData: dstBlobData,
- dstLocalData: dstLocalData,
- preserveSMBPermissions: preserveSMBPermissions,
- preserveSMBInfo: preserveSMBInfo,
- s2sGetPropertiesInBackend: s2sGetPropertiesInBackend,
- s2sSourceChangeValidation: s2sSourceChangeValidation,
- destLengthValidation: destLengthValidation,
- s2sInvalidMetadataHandleOption: s2sInvalidMetadataHandleOption,
- deleteSnapshotsOption: deleteSnapshotsOption,
- permanentDeleteOption: permanentDeleteOption,
- rehydratePriorityType: rehydratePriorityType,
- atomicJobStatus: atomicJobStatus,
- atomicPartStatus: atomicPartStatus);
- }
-
public static string GetNewTransferId() => Guid.NewGuid().ToString();
- private IDictionary BuildMetadata()
- => new Dictionary(StringComparer.OrdinalIgnoreCase)
- {
- { "foo", "bar" },
- { "meta", "data" },
- { "Capital", "letter" },
- { "UPPER", "case" }
- };
-
- private Dictionary BuildTags()
- => new Dictionary
- {
- { "tagKey0", "tagValue0" },
- { "tagKey1", "tagValue1" }
- };
}
}
diff --git a/sdk/storage/Azure.Storage.DataMovement/tests/LocalTransferCheckpointerTests.cs b/sdk/storage/Azure.Storage.DataMovement/tests/LocalTransferCheckpointerTests.cs
index ca431b3208b8e..65797ee304285 100644
--- a/sdk/storage/Azure.Storage.DataMovement/tests/LocalTransferCheckpointerTests.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/tests/LocalTransferCheckpointerTests.cs
@@ -66,7 +66,7 @@ internal void CreateStubJobPartPlanFile(
JobPartPlanFileName fileName = new JobPartPlanFileName(checkpointerPath, transferId, i);
- using (FileStream stream = File.Create(fileName.FullPath, DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
+ using (FileStream stream = File.Create(fileName.FullPath))
{
header.Serialize(stream);
}
@@ -197,7 +197,6 @@ public async Task AddNewJobPartAsync()
// Arrange
string transferId = GetNewTransferId();
int partNumber = 0;
- int chunksTotal = 1;
JobPartPlanHeader header = CheckpointerTesting.CreateDefaultJobPartHeader(
transferId: transferId,
partNumber: partNumber);
@@ -214,7 +213,6 @@ public async Task AddNewJobPartAsync()
await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: partNumber,
- chunksTotal: chunksTotal,
headerStream: stream);
}
@@ -226,15 +224,7 @@ await transferCheckpointer.AddNewJobPartAsync(
int partCount = await transferCheckpointer.CurrentJobPartCountAsync(transferId);
Assert.AreEqual(1, partCount);
- using (Stream stream = await transferCheckpointer.ReadJobPartPlanFileAsync(
- transferId: transferId,
- partNumber: partNumber,
- offset: 0,
- length: DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
- {
- // Assert
- await CheckpointerTesting.AssertJobPlanHeaderAsync(header, stream);
- }
+ await transferCheckpointer.AssertJobPlanHeaderAsync(transferId, partNumber, header);
}
[Test]
@@ -244,7 +234,6 @@ public async Task AddNewJobPartAsync_Error()
string transferId = GetNewTransferId();
int partNumber = 0;
- int chunksTotal = 1;
JobPartPlanHeader header = CheckpointerTesting.CreateDefaultJobPartHeader(
transferId: transferId,
partNumber: partNumber);
@@ -260,7 +249,6 @@ public async Task AddNewJobPartAsync_Error()
await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: partNumber,
- chunksTotal: chunksTotal,
headerStream: stream);
// Add the same job part twice
@@ -268,7 +256,6 @@ await transferCheckpointer.AddNewJobPartAsync(
async () => await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: partNumber,
- chunksTotal: chunksTotal,
headerStream: stream));
}
@@ -288,7 +275,6 @@ public async Task AddNewJobPartAsync_MultipleParts()
// Add multiple parts for the same job
string transferId = GetNewTransferId();
- int chunksTotal = 1;
JobPartPlanHeader header1 = CheckpointerTesting.CreateDefaultJobPartHeader(
transferId: transferId,
partNumber: 0);
@@ -313,7 +299,6 @@ public async Task AddNewJobPartAsync_MultipleParts()
await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: 0,
- chunksTotal: chunksTotal,
headerStream: stream);
}
using (Stream stream = new MemoryStream())
@@ -323,7 +308,6 @@ await transferCheckpointer.AddNewJobPartAsync(
await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: 1,
- chunksTotal: chunksTotal,
headerStream: stream);
}
using (Stream stream = new MemoryStream())
@@ -333,7 +317,6 @@ await transferCheckpointer.AddNewJobPartAsync(
await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: 2,
- chunksTotal: chunksTotal,
headerStream: stream);
}
using (Stream stream = new MemoryStream())
@@ -343,7 +326,6 @@ await transferCheckpointer.AddNewJobPartAsync(
await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: 3,
- chunksTotal: chunksTotal,
headerStream: stream);
}
@@ -352,45 +334,10 @@ await transferCheckpointer.AddNewJobPartAsync(
Assert.AreEqual(1, transferIds.Count);
Assert.IsTrue(transferIds.Contains(transferId));
- using (Stream stream = await transferCheckpointer.ReadJobPartPlanFileAsync(
- transferId: transferId,
- partNumber: 0,
- offset: 0,
- length: DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
- {
- // Assert
- await CheckpointerTesting.AssertJobPlanHeaderAsync(header1, stream);
- }
-
- using (Stream stream = await transferCheckpointer.ReadJobPartPlanFileAsync(
- transferId: transferId,
- partNumber: 1,
- offset: 0,
- length: DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
- {
- // Assert
- await CheckpointerTesting.AssertJobPlanHeaderAsync(header2, stream);
- }
-
- using (Stream stream = await transferCheckpointer.ReadJobPartPlanFileAsync(
- transferId: transferId,
- partNumber: 2,
- offset: 0,
- length: DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
- {
- // Assert
- await CheckpointerTesting.AssertJobPlanHeaderAsync(header3, stream);
- }
-
- using (Stream stream = await transferCheckpointer.ReadJobPartPlanFileAsync(
- transferId: transferId,
- partNumber: 3,
- offset: 0,
- length: DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
- {
- // Assert
- await CheckpointerTesting.AssertJobPlanHeaderAsync(header4, stream);
- }
+ await transferCheckpointer.AssertJobPlanHeaderAsync(transferId, 0, header1);
+ await transferCheckpointer.AssertJobPlanHeaderAsync(transferId, 1, header2);
+ await transferCheckpointer.AssertJobPlanHeaderAsync(transferId, 2, header3);
+ await transferCheckpointer.AssertJobPlanHeaderAsync(transferId, 3, header4);
}
[Test]
@@ -413,7 +360,6 @@ public async Task AddNewJobPartAsync_AddAfterRemove()
await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: 1,
- chunksTotal: 2,
headerStream: stream);
}
await transferCheckpointer.TryRemoveStoredTransferAsync(transferId);
@@ -425,7 +371,6 @@ await transferCheckpointer.AddNewJobPartAsync(
await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: 1,
- chunksTotal: 2,
headerStream: stream);
}
@@ -434,15 +379,7 @@ await transferCheckpointer.AddNewJobPartAsync(
Assert.AreEqual(1, transferIds.Count);
Assert.IsTrue(transferIds.Contains(transferId));
- using (Stream stream = await transferCheckpointer.ReadJobPartPlanFileAsync(
- transferId: transferId,
- partNumber: 1,
- offset: 0,
- length: DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
- {
- // Assert
- await CheckpointerTesting.AssertJobPlanHeaderAsync(header, stream);
- }
+ await transferCheckpointer.AssertJobPlanHeaderAsync(transferId, 1, header);
}
[Test]
@@ -615,7 +552,6 @@ public async Task CurrentJobPartCountAsync_OneJob()
// Arrange
string transferId = GetNewTransferId();
int partNumber = 0;
- int chunksTotal = 1;
JobPartPlanHeader header = CheckpointerTesting.CreateDefaultJobPartHeader(
transferId: transferId,
partNumber: partNumber);
@@ -631,7 +567,6 @@ public async Task CurrentJobPartCountAsync_OneJob()
await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: partNumber,
- chunksTotal: chunksTotal,
headerStream: stream);
}
@@ -649,7 +584,6 @@ public async Task CurrentJobPartCountAsync_MultipleJobs()
// Arrange
string transferId = GetNewTransferId();
- int chunksTotal = 1;
JobPartPlanHeader header1 = CheckpointerTesting.CreateDefaultJobPartHeader(
transferId: transferId,
partNumber: 0);
@@ -674,7 +608,6 @@ public async Task CurrentJobPartCountAsync_MultipleJobs()
await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: 0,
- chunksTotal: chunksTotal,
headerStream: stream);
}
using (Stream stream = new MemoryStream())
@@ -684,7 +617,6 @@ await transferCheckpointer.AddNewJobPartAsync(
await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: 1,
- chunksTotal: chunksTotal,
headerStream: stream);
}
using (Stream stream = new MemoryStream())
@@ -694,7 +626,6 @@ await transferCheckpointer.AddNewJobPartAsync(
await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: 2,
- chunksTotal: chunksTotal,
headerStream: stream);
}
using (Stream stream = new MemoryStream())
@@ -704,7 +635,6 @@ await transferCheckpointer.AddNewJobPartAsync(
await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: 3,
- chunksTotal: chunksTotal,
headerStream: stream);
}
@@ -793,7 +723,6 @@ public async Task ReadJobPartPlanFileAsync()
// Arrange
string transferId = GetNewTransferId();
int partNumber = 0;
- int chunksTotal = 1;
JobPartPlanHeader header = CheckpointerTesting.CreateDefaultJobPartHeader(
transferId: transferId,
partNumber: partNumber);
@@ -808,20 +737,11 @@ public async Task ReadJobPartPlanFileAsync()
await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: partNumber,
- chunksTotal: chunksTotal,
headerStream: stream);
}
// Act
- using (Stream stream = await transferCheckpointer.ReadJobPartPlanFileAsync(
- transferId: transferId,
- partNumber: partNumber,
- offset: 0,
- length: DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
- {
- // Assert
- await CheckpointerTesting.AssertJobPlanHeaderAsync(header, stream);
- }
+ await transferCheckpointer.AssertJobPlanHeaderAsync(transferId, partNumber, header);
}
[Test]
@@ -841,7 +761,7 @@ public void ReadJobPartPlanFileAsync_Error()
transferId: transferId,
partNumber: partNumber,
offset: 0,
- length: DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes));
+ length: 0));
}
[Test]
@@ -917,148 +837,24 @@ public async Task SetJobTransferStatusAsync()
// Arrange
string transferId = GetNewTransferId();
- int partNumber = 0;
- int chunksTotal = 1;
DataTransferStatus newStatus = SuccessfulCompletedStatus;
- JobPartPlanHeader header = CheckpointerTesting.CreateDefaultJobPartHeader(
- transferId: transferId,
- partNumber: partNumber);
TransferCheckpointer transferCheckpointer = new LocalTransferCheckpointer(test.DirectoryPath);
-
await AddJobToCheckpointer(transferCheckpointer, transferId);
- using (MemoryStream stream = new MemoryStream())
- {
- header.Serialize(stream);
-
- await transferCheckpointer.AddNewJobPartAsync(
- transferId: transferId,
- partNumber: partNumber,
- chunksTotal: chunksTotal,
- headerStream: stream);
- }
// Act
await transferCheckpointer.SetJobTransferStatusAsync(transferId, newStatus);
// Assert
- header.AtomicJobStatus = newStatus;
- using (Stream stream = await transferCheckpointer.ReadJobPartPlanFileAsync(
- transferId: transferId,
- partNumber: partNumber,
- offset: 0,
- length: DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
- {
- await CheckpointerTesting.AssertJobPlanHeaderAsync(header, stream);
- }
- }
-
- [Test]
- public async Task SetJobTransferStatusAsync_MultipleParts()
- {
- using DisposingLocalDirectory test = DisposingLocalDirectory.GetTestDirectory();
-
- // Arrange
- string transferId = GetNewTransferId();
- int chunksTotal = 1;
- DataTransferStatus newStatus = SuccessfulCompletedStatus;
- JobPartPlanHeader header1 = CheckpointerTesting.CreateDefaultJobPartHeader(
- transferId: transferId,
- partNumber: 0);
- JobPartPlanHeader header2 = CheckpointerTesting.CreateDefaultJobPartHeader(
- transferId: transferId,
- partNumber: 1);
- JobPartPlanHeader header3 = CheckpointerTesting.CreateDefaultJobPartHeader(
- transferId: transferId,
- partNumber: 2);
- JobPartPlanHeader header4 = CheckpointerTesting.CreateDefaultJobPartHeader(
- transferId: transferId,
- partNumber: 3);
-
- TransferCheckpointer transferCheckpointer = new LocalTransferCheckpointer(test.DirectoryPath);
-
- await AddJobToCheckpointer(transferCheckpointer, transferId);
-
- using (Stream stream = new MemoryStream())
- {
- header1.Serialize(stream);
-
- await transferCheckpointer.AddNewJobPartAsync(
- transferId: transferId,
- partNumber: 0,
- chunksTotal: chunksTotal,
- headerStream: stream);
- }
- using (Stream stream = new MemoryStream())
- {
- header2.Serialize(stream);
-
- await transferCheckpointer.AddNewJobPartAsync(
- transferId: transferId,
- partNumber: 1,
- chunksTotal: chunksTotal,
- headerStream: stream);
- }
- using (Stream stream = new MemoryStream())
- {
- header3.Serialize(stream);
-
- await transferCheckpointer.AddNewJobPartAsync(
- transferId: transferId,
- partNumber: 2,
- chunksTotal: chunksTotal,
- headerStream: stream);
- }
- using (Stream stream = new MemoryStream())
+ using (Stream stream = await transferCheckpointer.ReadJobPlanFileAsync(
+ transferId,
+ DataMovementConstants.JobPlanFile.JobStatusIndex,
+ DataMovementConstants.IntSizeInBytes))
{
- header4.Serialize(stream);
-
- await transferCheckpointer.AddNewJobPartAsync(
- transferId: transferId,
- partNumber: 3,
- chunksTotal: chunksTotal,
- headerStream: stream);
- }
-
- // Act
- await transferCheckpointer.SetJobTransferStatusAsync(transferId, newStatus);
+ BinaryReader reader = new BinaryReader(stream);
+ JobPlanStatus jobPlanStatus = (JobPlanStatus)reader.ReadInt32();
- // Assert
- header1.AtomicJobStatus = newStatus;
- using (Stream stream = await transferCheckpointer.ReadJobPartPlanFileAsync(
- transferId: transferId,
- partNumber: 0,
- offset: 0,
- length: DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
- {
- await CheckpointerTesting.AssertJobPlanHeaderAsync(header1, stream);
- }
- header2.AtomicJobStatus = newStatus;
- using (Stream stream = await transferCheckpointer.ReadJobPartPlanFileAsync(
- transferId: transferId,
- partNumber: 1,
- offset: 0,
- length: DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
- {
- await CheckpointerTesting.AssertJobPlanHeaderAsync(header2, stream);
- }
- header3.AtomicJobStatus = newStatus;
- using (Stream stream = await transferCheckpointer.ReadJobPartPlanFileAsync(
- transferId: transferId,
- partNumber: 2,
- offset: 0,
- length: DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
- {
- await CheckpointerTesting.AssertJobPlanHeaderAsync(header3, stream);
- }
- header4.AtomicJobStatus = newStatus;
- using (Stream stream = await transferCheckpointer.ReadJobPartPlanFileAsync(
- transferId: transferId,
- partNumber: 3,
- offset: 0,
- length: DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
- {
- await CheckpointerTesting.AssertJobPlanHeaderAsync(header4, stream);
+ Assert.That(jobPlanStatus.ToDataTransferStatus(), Is.EqualTo(newStatus));
}
}
@@ -1069,11 +865,7 @@ public void SetJobTransferStatusAsync_Error()
// Arrange
string transferId = GetNewTransferId();
- int partNumber = 0;
DataTransferStatus newStatus = SuccessfulCompletedStatus;
- JobPartPlanHeader header = CheckpointerTesting.CreateDefaultJobPartHeader(
- transferId: transferId,
- partNumber: partNumber);
TransferCheckpointer transferCheckpointer = new LocalTransferCheckpointer(test.DirectoryPath);
@@ -1090,7 +882,6 @@ public async Task SetJobPartTransferStatusAsync()
// Arrange
string transferId = GetNewTransferId();
int partNumber = 0;
- int chunksTotal = 1;
// originally the default is set to Queued
DataTransferStatus newStatus = SuccessfulCompletedStatus;
JobPartPlanHeader header = CheckpointerTesting.CreateDefaultJobPartHeader(
@@ -1107,7 +898,6 @@ public async Task SetJobPartTransferStatusAsync()
await transferCheckpointer.AddNewJobPartAsync(
transferId: transferId,
partNumber: partNumber,
- chunksTotal: chunksTotal,
headerStream: stream);
}
@@ -1115,15 +905,8 @@ await transferCheckpointer.AddNewJobPartAsync(
await transferCheckpointer.SetJobPartTransferStatusAsync(transferId, partNumber, newStatus);
// Assert
- header.AtomicPartStatus = newStatus;
- using (Stream stream = await transferCheckpointer.ReadJobPartPlanFileAsync(
- transferId: transferId,
- partNumber: partNumber,
- offset: 0,
- length: DataMovementConstants.JobPartPlanFile.JobPartHeaderSizeInBytes))
- {
- await CheckpointerTesting.AssertJobPlanHeaderAsync(header, stream);
- }
+ header.JobPartStatus = newStatus;
+ await transferCheckpointer.AssertJobPlanHeaderAsync(transferId, partNumber, header);
}
[Test]
diff --git a/sdk/storage/Azure.Storage.DataMovement/tests/Resources/SampleJobPartPlanFile.b3.ndmpart b/sdk/storage/Azure.Storage.DataMovement/tests/Resources/SampleJobPartPlanFile.b3.ndmpart
new file mode 100644
index 0000000000000000000000000000000000000000..3bcf786284a646c1f8d1986ee989384a35e5a674
GIT binary patch
literal 151
zcmYc?W?(q8YvR$M&={W$8KG*eZ(^>pGC+aLl^d^@-{$biPfpBn%gjlIi34e;oc!c$
zAd!S5Rs>`V0C5u#i$cYjAhZL7VgypoR{F(>xdl0?y2bgWMaiiMeoAU_NoHPRNoIZ?
E00FoodH?_b
literal 0
HcmV?d00001