diff --git a/src/databox/azext_databox/generated/_help.py b/src/databox/azext_databox/generated/_help.py index 8d9a4bb8c89..f9e02206c38 100644 --- a/src/databox/azext_databox/generated/_help.py +++ b/src/databox/azext_databox/generated/_help.py @@ -23,7 +23,7 @@ examples: - name: JobsListByResourceGroup text: |- - az databox job list --resource-group "SdkRg4981" + az databox job list --resource-group "SdkRg5154" """ helps['databox job show'] = """ @@ -32,22 +32,13 @@ examples: - name: JobsGet text: |- - az databox job show --expand "details" --name "SdkJob3971" --resource-group "SdkRg4981" - - name: JobsGet1 + az databox job show --expand "details" --name "SdkJob952" --resource-group "SdkRg5154" + - name: JobsGetCmk text: |- - az databox job show --expand "details" --name "SdkJob3970" --resource-group "SdkRg4981" - - name: JobsGet2 + az databox job show --expand "details" --name "SdkJob1735" --resource-group "SdkRg7937" + - name: JobsGetExport text: |- - az databox job show --expand "details" --name "SdkJob3971" --resource-group "SdkRg4981" - - name: JobsGet3 - text: |- - az databox job show --expand "details" --name "SdkJob3971" --resource-group "SdkRg4981" - - name: JobsGet4 - text: |- - az databox job show --expand "details" --name "SdkJob3971" --resource-group "SdkRg4981" - - name: JobsGet5 - text: |- - az databox job show --expand "details" --name "SdkJob3971" --resource-group "SdkRg4981" + az databox job show --expand "details" --name "SdkJob6429" --resource-group "SdkRg8091" """ helps['databox job create'] = """ @@ -66,38 +57,56 @@ examples: - name: JobsCreate text: |- - az databox job create --name "SdkJob3971" --location "westus" --details "{\\"contactDetails\\":{\\"conta\ -ctName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExte\ -nsion\\":\\"1234\\"},\\"destinationAccountDetails\\":[{\\"dataDestinationType\\":\\"StorageAccount\\",\\"storageAccount\ 
-Id\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/databoxbvt/providers/Microsoft.Storage/stor\ -ageAccounts/databoxbvttestaccount\\"}],\\"jobDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{\\"addressType\\":\\"C\ -ommercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\ -\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"}\ -}" --sku name="DataBox" --resource-group "SdkRg4981" + az databox job create --name "SdkJob952" --location "westus" --transfer-type "ImportToAzure" --details "\ +{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\\\ +":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountType\ +\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/d\ +ataboxbvt/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"jobDetailsType\\":\\"DataBox\\",\\\ +"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\\ +",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEN\ +D ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg5154" + - name: JobsCreateDevicePassword + text: |- + az databox job create --name "SdkJob9640" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"sharePassword\\":\\"Abcd223@22344Abcd223@22344\\",\\"storageAccountId\\":\\"/subscriptions\ 
+/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/databoxbvt1/providers/Microsoft.Storage/storageAccounts/databoxbvt\ +testaccount2\\"}}],\\"devicePassword\\":\\"Abcd223@22344\\",\\"jobDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{\ +\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\ +\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddr\ +ess2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg7478" + - name: JobsCreateExport + text: |- + az databox job create --name "SdkJob6429" --location "westus" --transfer-type "ExportFromAzure" --detail\ +s "{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phon\ +e\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataExportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTy\ +pe\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups\ +/akvenkat/providers/Microsoft.Storage/storageAccounts/aaaaaa2\\"},\\"transferConfiguration\\":{\\"transferAllDetails\\"\ +:{\\"include\\":{\\"dataAccountType\\":\\"StorageAccount\\",\\"transferAllBlobs\\":true,\\"transferAllFiles\\":true}},\ +\\"transferConfigurationType\\":\\"TransferAll\\"}}],\\"jobDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{\\"addre\ +ssType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"p\ +ostalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\ +\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg8091" """ helps['databox job update'] = """ type: command short-summary: Updates the properties of an existing job. - parameters: - - name: --destination-account-details - short-summary: Destination account details. 
- long-summary: | - Usage: --destination-account-details data-destination-type=XX account-id=XX share-password=XX - - data-destination-type: Required. Data Destination Type. - account-id: Arm Id of the destination where the data has to be moved. - share-password: Share password to be shared by all shares in SA. - - Multiple actions can be specified by using more than one --destination-account-details argument. examples: - name: JobsPatch text: |- - az databox job update --name "SdkJob3971" --details "{\\"contactDetails\\":{\\"contactName\\":\\"Update \ -Job\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"s\ -hippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\ + az databox job update --name "SdkJob952" --details "{\\"contactDetails\\":{\\"contactName\\":\\"Update J\ +ob\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"sh\ +ippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\ \\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND \ -ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --resource-group "SdkRg4981" +ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --resource-group "SdkRg5154" + - name: JobsPatchCmk + text: |- + az databox job update --name "SdkJob1735" --details "{\\"keyEncryptionKey\\":{\\"kekType\\":\\"CustomerM\ +anaged\\",\\"kekUrl\\":\\"https://sdkkeyvault.vault.azure.net/keys/SSDKEY/\\",\\"kekVaultResourceID\\":\\"/subscription\ +s/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/providers/Microsoft.KeyVault/vaults/SDKKeyVault\\"}}" --\ +resource-group "SdkRg7937" """ helps['databox job delete'] = """ @@ -106,7 +115,7 @@ examples: - name: JobsDelete text: |- - az databox job delete --name "SdkJob3971" --resource-group 
"SdkRg4981" + az databox job delete --name "SdkJob952" --resource-group "SdkRg5154" """ helps['databox job book-shipment-pick-up'] = """ @@ -125,7 +134,7 @@ examples: - name: JobsCancelPost text: |- - az databox job cancel --reason "CancelTest" --name "SdkJob3971" --resource-group "SdkRg4981" + az databox job cancel --reason "CancelTest" --name "SdkJob952" --resource-group "SdkRg5154" """ helps['databox job list-credentials'] = """ @@ -143,13 +152,13 @@ examples: - name: Pause executing next line of CLI script until the databox job is successfully created. text: |- - az databox job wait --expand "details" --name "SdkJob3971" --resource-group "SdkRg4981" --created + az databox job wait --expand "details" --name "SdkJob6429" --resource-group "SdkRg8091" --created - name: Pause executing next line of CLI script until the databox job is successfully updated. text: |- - az databox job wait --expand "details" --name "SdkJob3971" --resource-group "SdkRg4981" --updated + az databox job wait --expand "details" --name "SdkJob6429" --resource-group "SdkRg8091" --updated - name: Pause executing next line of CLI script until the databox job is successfully deleted. text: |- - az databox job wait --expand "details" --name "SdkJob3971" --resource-group "SdkRg4981" --deleted + az databox job wait --expand "details" --name "SdkJob6429" --resource-group "SdkRg8091" --deleted """ helps['databox service'] = """ @@ -157,68 +166,103 @@ short-summary: databox service """ -helps['databox service list-available-sku'] = """ - type: command - short-summary: This method provides the list of available skus for the given subscription and location. 
- examples: - - name: AvailableSkusPost - text: |- - az databox service list-available-sku --country "US" --location "westus" --location "westus" -""" - helps['databox service list-available-sku-by-resource-group'] = """ type: command short-summary: This method provides the list of available skus for the given subscription, resource group and locat\ ion. examples: - - name: AvailableSkusByResourceGroup + - name: AvailableSkusPost text: |- - az databox service list-available-sku-by-resource-group --country "US" --location "westus" --location "w\ -estus" --resource-group "SdkRg9836" + az databox service list-available-sku-by-resource-group --country "US" --location "westus" --transfer-ty\ +pe "ImportToAzure" --location "westus" --resource-group "bvttoolrg6" """ helps['databox service region-configuration'] = """ type: command - short-summary: This API provides configuration details specific to given region/location. + short-summary: This API provides configuration details specific to given region/location at Subscription level. parameters: - name: --data-box-schedule-availability-request short-summary: Request body to get the availability for scheduling data box orders orders. long-summary: | - Usage: --data-box-schedule-availability-request storage-location=XX sku-name=XX + Usage: --data-box-schedule-availability-request storage-location=XX sku-name=XX country=XX storage-location: Required. Location for data transfer. For locations check: https://management.azure.com/s\ ubscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. - name: --disk-schedule-availability-request short-summary: Request body to get the availability for scheduling disk orders. 
long-summary: | Usage: --disk-schedule-availability-request expected-data-size-in-terabytes=XX storage-location=XX sku-name\ -=XX +=XX country=XX expected-data-size-in-terabytes: Required. The expected size of the data, which needs to be transferred in \ this job, in terabytes. storage-location: Required. Location for data transfer. For locations check: https://management.azure.com/s\ ubscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. - name: --heavy-schedule-availability-request short-summary: Request body to get the availability for scheduling heavy orders. long-summary: | - Usage: --heavy-schedule-availability-request storage-location=XX sku-name=XX + Usage: --heavy-schedule-availability-request storage-location=XX sku-name=XX country=XX storage-location: Required. Location for data transfer. For locations check: https://management.azure.com/s\ ubscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. examples: - - name: ServiceRegionConfiguration + - name: RegionConfiguration text: |- az databox service region-configuration --location "westus" --schedule-availability-request "{\\"skuName\ \\":\\"DataBox\\",\\"storageLocation\\":\\"westus\\"}" """ +helps['databox service region-configuration-by-resource-group'] = """ + type: command + short-summary: This API provides configuration details specific to given region/location at Resource group level. + parameters: + - name: --data-box-schedule-availability-request + short-summary: Request body to get the availability for scheduling data box orders orders. + long-summary: | + Usage: --data-box-schedule-availability-request storage-location=XX sku-name=XX country=XX + + storage-location: Required. Location for data transfer. 
For locations check: https://management.azure.com/s\ +ubscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. + - name: --disk-schedule-availability-request + short-summary: Request body to get the availability for scheduling disk orders. + long-summary: | + Usage: --disk-schedule-availability-request expected-data-size-in-terabytes=XX storage-location=XX sku-name\ +=XX country=XX + + expected-data-size-in-terabytes: Required. The expected size of the data, which needs to be transferred in \ +this job, in terabytes. + storage-location: Required. Location for data transfer. For locations check: https://management.azure.com/s\ +ubscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. + - name: --heavy-schedule-availability-request + short-summary: Request body to get the availability for scheduling heavy orders. + long-summary: | + Usage: --heavy-schedule-availability-request storage-location=XX sku-name=XX country=XX + + storage-location: Required. Location for data transfer. For locations check: https://management.azure.com/s\ +ubscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. 
+ examples: + - name: RegionConfigurationByResourceGroup + text: |- + az databox service region-configuration-by-resource-group --location "westus" --schedule-availability-re\ +quest "{\\"skuName\\":\\"DataBox\\",\\"storageLocation\\":\\"westus\\"}" --resource-group "SdkRg4981" +""" + helps['databox service validate-address'] = """ type: command - short-summary: [DEPRECATED NOTICE: This operation will soon be removed] This method validates the customer shipping\ - address and provide alternate addresses if any. + short-summary: [DEPRECATED NOTICE: This operation will soon be removed]. This method validates the customer shippin\ +g address and provide alternate addresses if any. parameters: - name: --shipping-address short-summary: Shipping address of the customer. @@ -256,16 +300,21 @@ individual-request-details: Required. List of request details contain validationType and its request as key\ and value respectively. examples: - - name: ServiceValidateInputs + - name: ValidateInputs text: |- az databox service validate-input --location "westus" --validation-request "{\\"individualRequestDetails\ -\\":[{\\"destinationAccountDetails\\":[{\\"dataDestinationType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subsc\ -riptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/databoxbvt/providers/Microsoft.Storage/storageAccounts/dat\ -aboxbvttestaccount\\"}],\\"location\\":\\"westus\\",\\"validationType\\":\\"ValidateDataDestinationDetails\\"},{\\"devi\ -ceType\\":\\"DataBox\\",\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"com\ -panyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"stree\ -tAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"},\\"validationType\\":\\"ValidateAddress\\"}],\\"va\ -lidationCategory\\":\\"JobCreationValidation\\"}" 
+\\":[{\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\"\ +:\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/databoxbvt/providers/Microsoft.Storage/storageAc\ +counts/databoxbvttestaccount\\"}}],\\"deviceType\\":\\"DataBox\\",\\"transferType\\":\\"ImportToAzure\\",\\"validationT\ +ype\\":\\"ValidateDataTransferDetails\\"},{\\"deviceType\\":\\"DataBox\\",\\"shippingAddress\\":{\\"addressType\\":\\"C\ +ommercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\ +\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"}\ +,\\"transportPreferences\\":{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"},\\"validationType\\":\\"ValidateAddres\ +s\\"},{\\"validationType\\":\\"ValidateSubscriptionIsAllowedToCreateJob\\"},{\\"country\\":\\"US\\",\\"deviceType\\":\\\ +"DataBox\\",\\"location\\":\\"westus\\",\\"transferType\\":\\"ImportToAzure\\",\\"validationType\\":\\"ValidateSkuAvail\ +ability\\"},{\\"deviceType\\":\\"DataBox\\",\\"validationType\\":\\"ValidateCreateOrderLimit\\"},{\\"deviceType\\":\\"D\ +ataBox\\",\\"preference\\":{\\"transportPreferences\\":{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"}},\\"validat\ +ionType\\":\\"ValidatePreferences\\"}],\\"validationCategory\\":\\"JobCreationValidation\\"}" """ helps['databox service validate-input-by-resource-group'] = """ @@ -280,14 +329,20 @@ individual-request-details: Required. List of request details contain validationType and its request as key\ and value respectively. 
examples: - - name: ServiceValidateInputsByResourceGroup + - name: ValidateInputsByResourceGroup text: |- - az databox service validate-input-by-resource-group --location "westus" --resource-group "SdkRg9836" --v\ -alidation-request "{\\"individualRequestDetails\\":[{\\"destinationAccountDetails\\":[{\\"dataDestinationType\\":\\"Sto\ -rageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/databoxbvt/\ -providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}],\\"location\\":\\"westus\\",\\"validationType\\"\ -:\\"ValidateDataDestinationDetails\\"},{\\"deviceType\\":\\"DataBox\\",\\"shippingAddress\\":{\\"addressType\\":\\"Comm\ -ercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"9\ -4107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"},\\"\ -validationType\\":\\"ValidateAddress\\"}],\\"validationCategory\\":\\"JobCreationValidation\\"}" + az databox service validate-input-by-resource-group --location "westus" --resource-group "SdkRg6861" --v\ +alidation-request "{\\"individualRequestDetails\\":[{\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountType\ +\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/d\ +ataboxbvt/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"deviceType\\":\\"DataBox\\",\\"tra\ +nsferType\\":\\"ImportToAzure\\",\\"validationType\\":\\"ValidateDataTransferDetails\\"},{\\"deviceType\\":\\"DataBox\\\ +",\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microso\ +ft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOW\ +NSEND ST\\",\\"streetAddress2\\":\\"Unit 
1\\"},\\"transportPreferences\\":{\\"preferredShipmentType\\":\\"MicrosoftMana\ +ged\\"},\\"validationType\\":\\"ValidateAddress\\"},{\\"validationType\\":\\"ValidateSubscriptionIsAllowedToCreateJob\\\ +"},{\\"country\\":\\"US\\",\\"deviceType\\":\\"DataBox\\",\\"location\\":\\"westus\\",\\"transferType\\":\\"ImportToAzu\ +re\\",\\"validationType\\":\\"ValidateSkuAvailability\\"},{\\"deviceType\\":\\"DataBox\\",\\"validationType\\":\\"Valid\ +ateCreateOrderLimit\\"},{\\"deviceType\\":\\"DataBox\\",\\"preference\\":{\\"transportPreferences\\":{\\"preferredShipm\ +entType\\":\\"MicrosoftManaged\\"}},\\"validationType\\":\\"ValidatePreferences\\"}],\\"validationCategory\\":\\"JobCre\ +ationValidation\\"}" """ diff --git a/src/databox/azext_databox/generated/_params.py b/src/databox/azext_databox/generated/_params.py index ef881a0bf34..5ca24953424 100644 --- a/src/databox/azext_databox/generated/_params.py +++ b/src/databox/azext_databox/generated/_params.py @@ -20,7 +20,6 @@ from azure.cli.core.commands.validators import get_default_location_from_resource_group from azext_databox.action import ( AddSku, - AddDestinationAccountDetails, AddDataBoxScheduleAvailabilityRequest, AddDiskScheduleAvailabilityRequest, AddHeavyScheduleAvailabilityRequest, @@ -53,6 +52,9 @@ def load_arguments(self, _): validator=get_default_location_from_resource_group) c.argument('tags', tags_type) c.argument('sku', action=AddSku, nargs='+', help='The sku type.') + c.argument('identity_type', help='Identity type') + c.argument('transfer_type', arg_type=get_enum_type(['ImportToAzure', 'ExportFromAzure']), help='Type of the dat' + 'a transfer.') c.argument('details', arg_type=CLIArgumentType(options_list=['--details'], help='Details of a job run. This fie' 'ld will only be sent for expand details filter. 
Expected value: json-string/@json-file.')) c.argument('delivery_type', arg_type=get_enum_type(['NonScheduled', 'Scheduled']), @@ -67,10 +69,9 @@ def load_arguments(self, _): c.argument('if_match', help='Defines the If-Match condition. The patch will be performed only if the ETag of th' 'e job on the server matches this value.') c.argument('tags', tags_type) + c.argument('identity_type', help='Identity type') c.argument('details', arg_type=CLIArgumentType(options_list=['--details'], help='Details of a job to be updated' '. Expected value: json-string/@json-file.')) - c.argument('destination_account_details', action=AddDestinationAccountDetails, nargs='+', help='Destination acc' - 'ount details.') with self.argument_context('databox job delete') as c: c.argument('resource_group_name', resource_group_name_type) @@ -110,17 +111,12 @@ def load_arguments(self, _): c.argument('expand', help='$expand is supported on details parameter for job, which provides details on the job' ' stages.') - with self.argument_context('databox service list-available-sku') as c: - c.argument('location', arg_type=get_location_type(self.cli_ctx), - validator=get_default_location_from_resource_group) - c.argument('country', help='ISO country code. Country for hardware shipment. For codes check: https://en.wikipe' - 'dia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements') - c.argument('sku_names', nargs='+', help='Sku Names to filter for available skus') - with self.argument_context('databox service list-available-sku-by-resource-group') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('location', arg_type=get_location_type(self.cli_ctx), validator=get_default_location_from_resource_group) + c.argument('transfer_type', arg_type=get_enum_type(['ImportToAzure', 'ExportFromAzure']), help='Type of the tra' + 'nsfer.') c.argument('country', help='ISO country code. Country for hardware shipment. 
For codes check: https://en.wikipe' 'dia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements') c.argument('sku_names', nargs='+', help='Sku Names to filter for available skus') @@ -140,12 +136,28 @@ def load_arguments(self, _): c.argument('transport_availability_request_sku_name', arg_type=get_enum_type(['DataBox', 'DataBoxDisk', 'DataBo' 'xHeavy']), help='Type of the device.') + with self.argument_context('databox service region-configuration-by-resource-group') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('location', arg_type=get_location_type(self.cli_ctx), + validator=get_default_location_from_resource_group, id_part='name') + c.argument('data_box_schedule_availability_request', action=AddDataBoxScheduleAvailabilityRequest, nargs='+', + help='Request body to get the availability for scheduling data box orders orders.', arg_group='Sched' + 'uleAvailabilityRequest') + c.argument('disk_schedule_availability_request', action=AddDiskScheduleAvailabilityRequest, nargs='+', help='Re' + 'quest body to get the availability for scheduling disk orders.', arg_group='ScheduleAvailabilityReq' + 'uest') + c.argument('heavy_schedule_availability_request', action=AddHeavyScheduleAvailabilityRequest, nargs='+', help= + 'Request body to get the availability for scheduling heavy orders.', arg_group='ScheduleAvailability' + 'Request') + c.argument('transport_availability_request_sku_name', arg_type=get_enum_type(['DataBox', 'DataBoxDisk', 'DataBo' + 'xHeavy']), help='Type of the device.') + with self.argument_context('databox service validate-address') as c: c.argument('location', arg_type=get_location_type(self.cli_ctx), validator=get_default_location_from_resource_group, id_part='name') - c.argument('validation_type', arg_type=get_enum_type(['ValidateAddress', 'ValidateDataDestinationDetails', 'Val' - 'idateSubscriptionIsAllowedToCreateJob', 'ValidatePreferences', 'ValidateCreateOrderLimit', 'Validat' - 'eSkuAvailability']), 
help='Identifies the type of validation request.') + c.argument('validation_type', arg_type=get_enum_type(['ValidateAddress', 'ValidateSubscriptionIsAllowedToCreate' + 'Job', 'ValidatePreferences', 'ValidateCreateOrderLimit', 'ValidateSkuAvailability', 'ValidateDataTr' + 'ansferDetails']), help='Identifies the type of validation request.') c.argument('shipping_address', action=AddShippingAddress, nargs='+', help='Shipping address of the customer.') c.argument('device_type', arg_type=get_enum_type(['DataBox', 'DataBoxDisk', 'DataBoxHeavy']), help='Device type' ' to be used for the job.') diff --git a/src/databox/azext_databox/generated/action.py b/src/databox/azext_databox/generated/action.py index cff0ba6cad3..c1a281db14b 100644 --- a/src/databox/azext_databox/generated/action.py +++ b/src/databox/azext_databox/generated/action.py @@ -40,32 +40,6 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use return d -class AddDestinationAccountDetails(argparse._AppendAction): - def __call__(self, parser, namespace, values, option_string=None): - action = self.get_action(values, option_string) - super(AddDestinationAccountDetails, self).__call__(parser, namespace, action, option_string) - - def get_action(self, values, option_string): # pylint: disable=no-self-use - try: - properties = defaultdict(list) - for (k, v) in (x.split('=', 1) for x in values): - properties[k].append(v) - properties = dict(properties) - except ValueError: - raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) - d = {} - for k in properties: - kl = k.lower() - v = properties[k] - if kl == 'data-destination-type': - d['data_destination_type'] = v[0] - elif kl == 'account-id': - d['account_id'] = v[0] - elif kl == 'share-password': - d['share_password'] = v[0] - return d - - class AddDataBoxScheduleAvailabilityRequest(argparse.Action): def __call__(self, parser, namespace, values, option_string=None): action = self.get_action(values, option_string) @@ -85,6 
+59,8 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use v = properties[k] if kl == 'storage-location': d['storage_location'] = v[0] + elif kl == 'country': + d['country'] = v[0] d['sku_name'] = 'DataBox' return d @@ -110,6 +86,8 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use d['expected_data_size_in_terabytes'] = v[0] elif kl == 'storage-location': d['storage_location'] = v[0] + elif kl == 'country': + d['country'] = v[0] d['sku_name'] = 'DataBoxDisk' return d @@ -133,6 +111,8 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use v = properties[k] if kl == 'storage-location': d['storage_location'] = v[0] + elif kl == 'country': + d['country'] = v[0] d['sku_name'] = 'DataBoxHeavy' return d diff --git a/src/databox/azext_databox/generated/commands.py b/src/databox/azext_databox/generated/commands.py index ebb0ded20f8..bdf57f12e5d 100644 --- a/src/databox/azext_databox/generated/commands.py +++ b/src/databox/azext_databox/generated/commands.py @@ -33,10 +33,11 @@ def load_command_table(self, _): operations_tmpl='azext_databox.vendored_sdks.databox.operations._service_operations#ServiceOperations.{}', client_factory=cf_service) with self.command_group('databox service', databox_service, client_factory=cf_service, is_experimental=True) as g: - g.custom_command('list-available-sku', 'databox_service_list_available_sku') g.custom_command('list-available-sku-by-resource-group', 'databox_service_list_available_sku_by_resource_group') g.custom_command('region-configuration', 'databox_service_region_configuration') + g.custom_command('region-configuration-by-resource-group', 'databox_service_region_configuration_by_resource_gr' + 'oup') g.custom_command('validate-address', 'databox_service_validate_address') g.custom_command('validate-input', 'databox_service_validate_input') g.custom_command('validate-input-by-resource-group', 'databox_service_validate_input_by_resource_group') diff --git 
a/src/databox/azext_databox/generated/custom.py b/src/databox/azext_databox/generated/custom.py index 644e8d111cd..6d79463694d 100644 --- a/src/databox/azext_databox/generated/custom.py +++ b/src/databox/azext_databox/generated/custom.py @@ -38,7 +38,9 @@ def databox_job_create(client, job_name, location, sku, + transfer_type, tags=None, + identity_type=None, details=None, delivery_type=None, delivery_info_scheduled_date_time=None, @@ -52,6 +54,8 @@ def databox_job_create(client, location=location, tags=tags, sku=sku, + type=identity_type, + transfer_type=transfer_type, details=details, delivery_type=delivery_type, scheduled_date_time=delivery_info_scheduled_date_time) @@ -62,8 +66,8 @@ def databox_job_update(client, job_name, if_match=None, tags=None, + identity_type=None, details=None, - destination_account_details=None, no_wait=False): if isinstance(details, str): details = json.loads(details) @@ -73,8 +77,8 @@ def databox_job_update(client, job_name=job_name, if_match=if_match, tags=tags, - details=details, - destination_account_details=destination_account_details) + type=identity_type, + details=details) def databox_job_delete(client, @@ -116,23 +120,15 @@ def databox_job_list_credentials(client, job_name=job_name) -def databox_service_list_available_sku(client, - country, - location=None, - sku_names=None): - return client.list_available_sku(location=location, - country=country, - available_sku_request_location=location, - sku_names=sku_names) - - def databox_service_list_available_sku_by_resource_group(client, resource_group_name, + transfer_type, country, location=None, sku_names=None): return client.list_available_sku_by_resource_group(resource_group_name=resource_group_name, location=location, + transfer_type=transfer_type, country=country, available_sku_request_location=location, sku_names=sku_names) @@ -160,6 +156,30 @@ def databox_service_region_configuration(client, sku_name=transport_availability_request_sku_name) +def 
databox_service_region_configuration_by_resource_group(client, + resource_group_name, + location, + data_box_schedule_availability_request=None, + disk_schedule_availability_request=None, + heavy_schedule_availability_request=None, + transport_availability_request_sku_name=None): + all_schedule_availability_request = [] + if data_box_schedule_availability_request is not None: + all_schedule_availability_request.append(data_box_schedule_availability_request) + if disk_schedule_availability_request is not None: + all_schedule_availability_request.append(disk_schedule_availability_request) + if heavy_schedule_availability_request is not None: + all_schedule_availability_request.append(heavy_schedule_availability_request) + if len(all_schedule_availability_request) > 1: + raise CLIError('at most one of data_box_schedule_availability_request, disk_schedule_availability_request, hea' + 'vy_schedule_availability_request is needed for schedule_availability_request!') + schedule_availability_request = all_schedule_availability_request[0] if len(all_schedule_availability_request) == 1 else None + return client.region_configuration_by_resource_group(resource_group_name=resource_group_name, + location=location, + schedule_availability_request=schedule_availability_request, + sku_name=transport_availability_request_sku_name) + + def databox_service_validate_address(client, location, validation_type, diff --git a/src/databox/azext_databox/tests/latest/test_databox_scenario.py b/src/databox/azext_databox/tests/latest/test_databox_scenario.py index 09da5bea581..15fd3d82259 100644 --- a/src/databox/azext_databox/tests/latest/test_databox_scenario.py +++ b/src/databox/azext_databox/tests/latest/test_databox_scenario.py @@ -19,99 +19,86 @@ @try_manual -def setup(test, rg, rg_2, rg_3, rg_4): +def setup(test, rg, rg_2, rg_3, rg_4, rg_5): pass # EXAMPLE: JobsCreate @try_manual -def step_jobscreate(test, rg, rg_2, rg_3, rg_4): +def step_jobscreate(test, rg, rg_2, rg_3, rg_4, rg_5): 
test.cmd('az databox job create ' - '--name "{SdkJob3971}" ' + '--name "{SdkJob952}" ' '--location "westus" ' + '--transfer-type "ImportToAzure" ' '--details "{{\\"contactDetails\\":{{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@m' - 'icrosoft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"}},\\"destinationAccountDeta' - 'ils\\":[{{\\"dataDestinationType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/{subscr' - 'iption_id}/resourcegroups/{rg}/providers/Microsoft.Storage/storageAccounts/{sa}\\"}}],\\"jobDetailsType\\' - '":\\"DataBox\\",\\"shippingAddress\\":{{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\"' - ',\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince' - '\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}}}" ' + 'icrosoft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"}},\\"dataImportDetails\\":[' + '{{\\"accountDetails\\":{{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscripti' + 'ons/{subscription_id}/resourcegroups/{rg}/providers/Microsoft.Storage/storageAccounts/{sa}\\"}}}}],\\"job' + 'DetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San' + ' Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"st' + 'ateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}}}' + '" ' '--sku name="DataBox" ' - '--resource-group "{rg_2}"', + '--resource-group "{rg_4}"', checks=[]) # EXAMPLE: JobsGet5 @try_manual -def step_jobsget5(test, rg, rg_2, rg_3, rg_4): - test.cmd('az databox job show ' - '--expand "details" ' - '--name "{SdkJob3971}" ' - '--resource-group "{rg_2}"', - checks=[]) +def step_jobsget5(test, rg, rg_2, rg_3, rg_4, rg_5): + # EXAMPLE NOT FOUND! 
+ pass # EXAMPLE: JobsGet4 @try_manual -def step_jobsget4(test, rg, rg_2, rg_3, rg_4): - test.cmd('az databox job show ' - '--expand "details" ' - '--name "{SdkJob3971}" ' - '--resource-group "{rg_2}"', - checks=[]) +def step_jobsget4(test, rg, rg_2, rg_3, rg_4, rg_5): + # EXAMPLE NOT FOUND! + pass # EXAMPLE: JobsGet3 @try_manual -def step_jobsget3(test, rg, rg_2, rg_3, rg_4): - test.cmd('az databox job show ' - '--expand "details" ' - '--name "{SdkJob3971}" ' - '--resource-group "{rg_2}"', - checks=[]) +def step_jobsget3(test, rg, rg_2, rg_3, rg_4, rg_5): + # EXAMPLE NOT FOUND! + pass # EXAMPLE: JobsGet2 @try_manual -def step_jobsget2(test, rg, rg_2, rg_3, rg_4): - test.cmd('az databox job show ' - '--expand "details" ' - '--name "{SdkJob3971}" ' - '--resource-group "{rg_2}"', - checks=[]) +def step_jobsget2(test, rg, rg_2, rg_3, rg_4, rg_5): + # EXAMPLE NOT FOUND! + pass # EXAMPLE: JobsGet1 @try_manual -def step_jobsget1(test, rg, rg_2, rg_3, rg_4): - test.cmd('az databox job show ' - '--expand "details" ' - '--name "{Jobs_2}" ' - '--resource-group "{rg_2}"', - checks=[]) +def step_jobsget1(test, rg, rg_2, rg_3, rg_4, rg_5): + # EXAMPLE NOT FOUND! 
+ pass # EXAMPLE: JobsGet @try_manual -def step_jobsget(test, rg, rg_2, rg_3, rg_4): +def step_jobsget(test, rg, rg_2, rg_3, rg_4, rg_5): test.cmd('az databox job show ' '--expand "details" ' - '--name "{SdkJob3971}" ' - '--resource-group "{rg_2}"', + '--name "{SdkJob952}" ' + '--resource-group "{rg_4}"', checks=[]) # EXAMPLE: JobsListByResourceGroup @try_manual -def step_jobslistbyresourcegroup(test, rg, rg_2, rg_3, rg_4): +def step_jobslistbyresourcegroup(test, rg, rg_2, rg_3, rg_4, rg_5): test.cmd('az databox job list ' - '--resource-group "{rg_2}"', + '--resource-group "{rg_4}"', checks=[]) # EXAMPLE: JobsList @try_manual -def step_jobslist(test, rg, rg_2, rg_3, rg_4): +def step_jobslist(test, rg, rg_2, rg_3, rg_4, rg_5): test.cmd('az databox job list ' '-g ""', checks=[]) @@ -119,45 +106,31 @@ def step_jobslist(test, rg, rg_2, rg_3, rg_4): # EXAMPLE: OperationsGet @try_manual -def step_operationsget(test, rg, rg_2, rg_3, rg_4): +def step_operationsget(test, rg, rg_2, rg_3, rg_4, rg_5): # EXAMPLE NOT FOUND! 
pass # EXAMPLE: ServiceValidateInputsByResourceGroup @try_manual -def step_servicevalidateinputsbyresourcegroup(test, rg, rg_2, rg_3, rg_4): - test.cmd('az databox service validate-input-by-resource-group ' - '--location "westus" ' - '--resource-group "{rg_3}" ' - '--validation-request "{{\\"individualRequestDetails\\":[{{\\"destinationAccountDetails\\":[{{\\"dataDesti' - 'nationType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/{subscription_id}/resourcegro' - 'ups/{rg}/providers/Microsoft.Storage/storageAccounts/{sa}\\"}}],\\"location\\":\\"westus\\",\\"validation' - 'Type\\":\\"ValidateDataDestinationDetails\\"}},{{\\"deviceType\\":\\"DataBox\\",\\"shippingAddress\\":{{' - '\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"co' - 'untry\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 ' - 'TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"}},\\"validationType\\":\\"ValidateAddress\\"}}],\\"valid' - 'ationCategory\\":\\"JobCreationValidation\\"}}"', - checks=[]) +def step_servicevalidateinputsbyresourcegroup(test, rg, rg_2, rg_3, rg_4, rg_5): + # EXAMPLE NOT FOUND! + pass # EXAMPLE: AvailableSkusByResourceGroup @try_manual -def step_availableskusbyresourcegroup(test, rg, rg_2, rg_3, rg_4): - test.cmd('az databox service list-available-sku-by-resource-group ' - '--country "US" ' - '--location "westus" ' - '--location "westus" ' - '--resource-group "{rg_3}"', - checks=[]) +def step_availableskusbyresourcegroup(test, rg, rg_2, rg_3, rg_4, rg_5): + # EXAMPLE NOT FOUND! 
+ pass # EXAMPLE: BookShipmentPickupPost @try_manual -def step_bookshipmentpickuppost(test, rg, rg_2, rg_3, rg_4): +def step_bookshipmentpickuppost(test, rg, rg_2, rg_3, rg_4, rg_5): test.cmd('az databox job book-shipment-pick-up ' - '--name "{Jobs_3}" ' - '--resource-group "{rg_4}" ' + '--name "{Jobs_2}" ' + '--resource-group "{rg_5}" ' '--end-time "2019-09-22T18:30:00Z" ' '--shipment-location "Front desk" ' '--start-time "2019-09-20T18:30:00Z"', @@ -166,49 +139,47 @@ def step_bookshipmentpickuppost(test, rg, rg_2, rg_3, rg_4): # EXAMPLE: JobsListCredentials @try_manual -def step_jobslistcredentials(test, rg, rg_2, rg_3, rg_4): +def step_jobslistcredentials(test, rg, rg_2, rg_3, rg_4, rg_5): test.cmd('az databox job list-credentials ' - '--name "{Jobs_3}" ' - '--resource-group "{rg_4}"', + '--name "{Jobs_2}" ' + '--resource-group "{rg_5}"', checks=[]) # EXAMPLE: JobsCancelPost @try_manual -def step_jobscancelpost(test, rg, rg_2, rg_3, rg_4): +def step_jobscancelpost(test, rg, rg_2, rg_3, rg_4, rg_5): test.cmd('az databox job cancel ' '--reason "CancelTest" ' - '--name "{SdkJob3971}" ' - '--resource-group "{rg_2}"', + '--name "{SdkJob952}" ' + '--resource-group "{rg_4}"', checks=[]) # EXAMPLE: JobsPatch @try_manual -def step_jobspatch(test, rg, rg_2, rg_3, rg_4): +def step_jobspatch(test, rg, rg_2, rg_3, rg_4, rg_5): test.cmd('az databox job update ' - '--name "{SdkJob3971}" ' + '--name "{SdkJob952}" ' '--details "{{\\"contactDetails\\":{{\\"contactName\\":\\"Update Job\\",\\"emailList\\":[\\"testing@micros' 'oft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"}},\\"shippingAddress\\":{{\\"add' 'ressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country' '\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNS' 'END ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}}}" ' - '--resource-group "{rg_2}"', + '--resource-group "{rg_4}"', checks=[]) # EXAMPLE: 
ServiceRegionConfiguration @try_manual -def step_serviceregionconfiguration(test, rg, rg_2, rg_3, rg_4): - test.cmd('az databox service region-configuration ' - '--location "westus" ' - '--schedule-availability-request "{{\\"skuName\\":\\"DataBox\\",\\"storageLocation\\":\\"westus\\"}}"', - checks=[]) +def step_serviceregionconfiguration(test, rg, rg_2, rg_3, rg_4, rg_5): + # EXAMPLE NOT FOUND! + pass # EXAMPLE: ValidateAddressPost @try_manual -def step_validateaddresspost(test, rg, rg_2, rg_3, rg_4): +def step_validateaddresspost(test, rg, rg_2, rg_3, rg_4, rg_5): test.cmd('az databox service validate-address ' '--location "westus" ' '--device-type "DataBox" ' @@ -220,91 +191,88 @@ def step_validateaddresspost(test, rg, rg_2, rg_3, rg_4): # EXAMPLE: ServiceValidateInputs @try_manual -def step_servicevalidateinputs(test, rg, rg_2, rg_3, rg_4): - test.cmd('az databox service validate-input ' - '--location "westus" ' - '--validation-request "{{\\"individualRequestDetails\\":[{{\\"destinationAccountDetails\\":[{{\\"dataDesti' - 'nationType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/{subscription_id}/resourcegro' - 'ups/{rg}/providers/Microsoft.Storage/storageAccounts/{sa}\\"}}],\\"location\\":\\"westus\\",\\"validation' - 'Type\\":\\"ValidateDataDestinationDetails\\"}},{{\\"deviceType\\":\\"DataBox\\",\\"shippingAddress\\":{{' - '\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"co' - 'untry\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 ' - 'TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"}},\\"validationType\\":\\"ValidateAddress\\"}}],\\"valid' - 'ationCategory\\":\\"JobCreationValidation\\"}}"', - checks=[]) +def step_servicevalidateinputs(test, rg, rg_2, rg_3, rg_4, rg_5): + # EXAMPLE NOT FOUND! 
+ pass # EXAMPLE: AvailableSkusPost @try_manual -def step_availableskuspost(test, rg, rg_2, rg_3, rg_4): - test.cmd('az databox service list-available-sku ' +def step_availableskuspost(test, rg, rg_2, rg_3, rg_4, rg_5): + test.cmd('az databox service list-available-sku-by-resource-group ' '--country "US" ' '--location "westus" ' - '--location "westus"', + '--transfer-type "ImportToAzure" ' + '--location "westus" ' + '--resource-group "{rg_5}"', checks=[]) # EXAMPLE: JobsDelete @try_manual -def step_jobsdelete(test, rg, rg_2, rg_3, rg_4): +def step_jobsdelete(test, rg, rg_2, rg_3, rg_4, rg_5): test.cmd('az databox job delete ' - '--name "{SdkJob3971}" ' - '--resource-group "{rg_2}"', + '--name "{SdkJob952}" ' + '--resource-group "{rg_4}"', checks=[]) @try_manual -def cleanup(test, rg, rg_2, rg_3, rg_4): +def cleanup(test, rg, rg_2, rg_3, rg_4, rg_5): pass @try_manual -def call_scenario(test, rg, rg_2, rg_3, rg_4): - setup(test, rg, rg_2, rg_3, rg_4) - step_jobscreate(test, rg, rg_2, rg_3, rg_4) - step_jobsget5(test, rg, rg_2, rg_3, rg_4) - step_jobsget4(test, rg, rg_2, rg_3, rg_4) - step_jobsget3(test, rg, rg_2, rg_3, rg_4) - step_jobsget2(test, rg, rg_2, rg_3, rg_4) - step_jobsget1(test, rg, rg_2, rg_3, rg_4) - step_jobsget(test, rg, rg_2, rg_3, rg_4) - step_jobslistbyresourcegroup(test, rg, rg_2, rg_3, rg_4) - step_jobslist(test, rg, rg_2, rg_3, rg_4) - step_operationsget(test, rg, rg_2, rg_3, rg_4) - step_servicevalidateinputsbyresourcegroup(test, rg, rg_2, rg_3, rg_4) - step_availableskusbyresourcegroup(test, rg, rg_2, rg_3, rg_4) - step_bookshipmentpickuppost(test, rg, rg_2, rg_3, rg_4) - step_jobslistcredentials(test, rg, rg_2, rg_3, rg_4) - step_jobscancelpost(test, rg, rg_2, rg_3, rg_4) - step_jobspatch(test, rg, rg_2, rg_3, rg_4) - step_serviceregionconfiguration(test, rg, rg_2, rg_3, rg_4) - step_validateaddresspost(test, rg, rg_2, rg_3, rg_4) - step_servicevalidateinputs(test, rg, rg_2, rg_3, rg_4) - step_availableskuspost(test, rg, rg_2, rg_3, rg_4) - 
step_jobsdelete(test, rg, rg_2, rg_3, rg_4) - cleanup(test, rg, rg_2, rg_3, rg_4) +def call_scenario(test, rg, rg_2, rg_3, rg_4, rg_5): + setup(test, rg, rg_2, rg_3, rg_4, rg_5) + step_jobscreate(test, rg, rg_2, rg_3, rg_4, rg_5) + step_jobsget5(test, rg, rg_2, rg_3, rg_4, rg_5) + step_jobsget4(test, rg, rg_2, rg_3, rg_4, rg_5) + step_jobsget3(test, rg, rg_2, rg_3, rg_4, rg_5) + step_jobsget2(test, rg, rg_2, rg_3, rg_4, rg_5) + step_jobsget1(test, rg, rg_2, rg_3, rg_4, rg_5) + step_jobsget(test, rg, rg_2, rg_3, rg_4, rg_5) + step_jobslistbyresourcegroup(test, rg, rg_2, rg_3, rg_4, rg_5) + step_jobslist(test, rg, rg_2, rg_3, rg_4, rg_5) + step_operationsget(test, rg, rg_2, rg_3, rg_4, rg_5) + step_servicevalidateinputsbyresourcegroup(test, rg, rg_2, rg_3, rg_4, rg_5) + step_availableskusbyresourcegroup(test, rg, rg_2, rg_3, rg_4, rg_5) + step_bookshipmentpickuppost(test, rg, rg_2, rg_3, rg_4, rg_5) + step_jobslistcredentials(test, rg, rg_2, rg_3, rg_4, rg_5) + step_jobscancelpost(test, rg, rg_2, rg_3, rg_4, rg_5) + step_jobspatch(test, rg, rg_2, rg_3, rg_4, rg_5) + step_serviceregionconfiguration(test, rg, rg_2, rg_3, rg_4, rg_5) + step_validateaddresspost(test, rg, rg_2, rg_3, rg_4, rg_5) + step_servicevalidateinputs(test, rg, rg_2, rg_3, rg_4, rg_5) + step_availableskuspost(test, rg, rg_2, rg_3, rg_4, rg_5) + step_jobsdelete(test, rg, rg_2, rg_3, rg_4, rg_5) + cleanup(test, rg, rg_2, rg_3, rg_4, rg_5) @try_manual class DataBoxManagementClientScenarioTest(ScenarioTest): @ResourceGroupPreparer(name_prefix='clitestdatabox_databoxbvt'[:7], key='rg', parameter_name='rg') - @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg4981'[:7], key='rg_2', parameter_name='rg_2') - @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg9836'[:7], key='rg_3', parameter_name='rg_3') - @ResourceGroupPreparer(name_prefix='clitestdatabox_bvttoolrg6'[:7], key='rg_4', parameter_name='rg_4') + @ResourceGroupPreparer(name_prefix='clitestdatabox_databoxbvt1'[:7], key='rg_2', 
parameter_name='rg_2') + @ResourceGroupPreparer(name_prefix='clitestdatabox_akvenkat'[:7], key='rg_3', parameter_name='rg_3') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg5154'[:7], key='rg_4', parameter_name='rg_4') + @ResourceGroupPreparer(name_prefix='clitestdatabox_bvttoolrg6'[:7], key='rg_5', parameter_name='rg_5') @StorageAccountPreparer(name_prefix='clitestdatabox_databoxbvttestaccount'[:7], key='sa', resource_group_parameter_name='rg') - def test_databox(self, rg, rg_2, rg_3, rg_4): + @StorageAccountPreparer(name_prefix='clitestdatabox_databoxbvttestaccount2'[:7], key='sa_2', + resource_group_parameter_name='rg_2') + @StorageAccountPreparer(name_prefix='clitestdatabox_aaaaaa2'[:7], key='sa_3', + resource_group_parameter_name='rg_3') + def test_databox(self, rg, rg_2, rg_3, rg_4, rg_5): self.kwargs.update({ 'subscription_id': self.get_subscription_id() }) self.kwargs.update({ - 'SdkJob3971': 'SdkJob3971', - 'Jobs_2': 'SdkJob3970', - 'Jobs_3': 'TJ-636646322037905056', + 'SdkJob952': 'SdkJob952', + 'Jobs_2': 'TJ-636646322037905056', }) - call_scenario(self, rg, rg_2, rg_3, rg_4) + call_scenario(self, rg, rg_2, rg_3, rg_4, rg_5) raise_if() diff --git a/src/databox/azext_databox/vendored_sdks/databox/_configuration.py b/src/databox/azext_databox/vendored_sdks/databox/_configuration.py index ed97e72f002..8b1944c0f00 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/_configuration.py +++ b/src/databox/azext_databox/vendored_sdks/databox/_configuration.py @@ -46,7 +46,7 @@ def __init__( self.credential = credential self.subscription_id = subscription_id - self.api_version = "2019-09-01" + self.api_version = "2020-04-01" self.credential_scopes = ['https://management.azure.com/.default'] self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) kwargs.setdefault('sdk_moniker', 'databoxmanagementclient/{}'.format(VERSION)) diff --git a/src/databox/azext_databox/vendored_sdks/databox/aio/_configuration_async.py 
b/src/databox/azext_databox/vendored_sdks/databox/aio/_configuration_async.py index 40db044789c..e7a2c4a346b 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/aio/_configuration_async.py +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/_configuration_async.py @@ -43,7 +43,7 @@ def __init__( self.credential = credential self.subscription_id = subscription_id - self.api_version = "2019-09-01" + self.api_version = "2020-04-01" self.credential_scopes = ['https://management.azure.com/.default'] self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) kwargs.setdefault('sdk_moniker', 'databoxmanagementclient/{}'.format(VERSION)) diff --git a/src/databox/azext_databox/vendored_sdks/databox/aio/operations_async/_job_operations_async.py b/src/databox/azext_databox/vendored_sdks/databox/aio/operations_async/_job_operations_async.py index 908c6e7cadf..3c496af9648 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/aio/operations_async/_job_operations_async.py +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/operations_async/_job_operations_async.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import datetime -from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -62,7 +62,7 @@ def list( cls = kwargs.pop('cls', None) # type: ClsType["models.JobResourceList"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2020-04-01" def prepare_request(next_link=None): if not next_link: @@ -103,8 +103,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -134,7 +135,7 @@ def list_by_resource_group( cls = kwargs.pop('cls', None) # type: ClsType["models.JobResourceList"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2020-04-01" def prepare_request(next_link=None): if not next_link: @@ -176,8 +177,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -211,7 +213,7 @@ async def get( cls = 
kwargs.pop('cls', None) # type: ClsType["models.JobResource"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2020-04-01" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -239,7 +241,8 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('JobResource', pipeline_response) @@ -255,7 +258,9 @@ async def _create_initial( job_name: str, location: str, sku: "models.Sku", + transfer_type: Union[str, "models.TransferType"], tags: Optional[Dict[str, str]] = None, + type: Optional[str] = None, details: Optional["models.JobDetails"] = None, delivery_type: Optional[Union[str, "models.JobDeliveryType"]] = None, scheduled_date_time: Optional[datetime.datetime] = None, @@ -265,8 +270,8 @@ async def _create_initial( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _job_resource = models.JobResource(location=location, tags=tags, sku=sku, details=details, delivery_type=delivery_type, scheduled_date_time=scheduled_date_time) - api_version = "2019-09-01" + _job_resource = models.JobResource(location=location, tags=tags, sku=sku, type=type, transfer_type=transfer_type, details=details, delivery_type=delivery_type, scheduled_date_time=scheduled_date_time) + api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -298,7 +303,8 @@ async def _create_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -316,7 +322,9 @@ async def create( job_name: str, location: str, sku: "models.Sku", + transfer_type: Union[str, "models.TransferType"], tags: Optional[Dict[str, str]] = None, + type: Optional[str] = None, details: Optional["models.JobDetails"] = None, delivery_type: Optional[Union[str, "models.JobDeliveryType"]] = None, scheduled_date_time: Optional[datetime.datetime] = None, @@ -336,9 +344,13 @@ async def create( :type location: str :param sku: The sku type. :type sku: ~data_box_management_client.models.Sku + :param transfer_type: Type of the data transfer. + :type transfer_type: str or ~data_box_management_client.models.TransferType :param tags: The list of key value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). :type tags: dict[str, str] + :param type: Identity type. + :type type: str :param details: Details of a job run. This field will only be sent for expand details filter. :type details: ~data_box_management_client.models.JobDetails :param delivery_type: Delivery type of Job. 
@@ -365,7 +377,9 @@ async def create( job_name=job_name, location=location, sku=sku, + transfer_type=transfer_type, tags=tags, + type=type, details=details, delivery_type=delivery_type, scheduled_date_time=scheduled_date_time, @@ -398,7 +412,7 @@ async def _delete_initial( cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2020-04-01" # Construct URL url = self._delete_initial.metadata['url'] # type: ignore @@ -421,9 +435,10 @@ async def _delete_initial( pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [202, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -484,16 +499,16 @@ async def _update_initial( job_name: str, if_match: Optional[str] = None, tags: Optional[Dict[str, str]] = None, + type: Optional[str] = None, details: Optional["models.UpdateJobDetails"] = None, - destination_account_details: Optional[List["models.DestinationAccountDetails"]] = None, **kwargs ) -> "models.JobResource": cls = kwargs.pop('cls', None) # type: ClsType["models.JobResource"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _job_resource_update_parameter = models.JobResourceUpdateParameter(tags=tags, details=details, destination_account_details=destination_account_details) - api_version = "2019-09-01" + _job_resource_update_parameter = models.JobResourceUpdateParameter(tags=tags, type=type, 
details=details) + api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -527,7 +542,8 @@ async def _update_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -545,8 +561,8 @@ async def update( job_name: str, if_match: Optional[str] = None, tags: Optional[Dict[str, str]] = None, + type: Optional[str] = None, details: Optional["models.UpdateJobDetails"] = None, - destination_account_details: Optional[List["models.DestinationAccountDetails"]] = None, **kwargs ) -> "models.JobResource": """Updates the properties of an existing job. @@ -562,10 +578,10 @@ async def update( :param tags: The list of key value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). :type tags: dict[str, str] + :param type: Identity type. + :type type: str :param details: Details of a job to be updated. :type details: ~data_box_management_client.models.UpdateJobDetails - :param destination_account_details: Destination account details. 
- :type destination_account_details: list[~data_box_management_client.models.DestinationAccountDetails] :keyword callable cls: A custom type or function that will be passed the direct response :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy @@ -586,8 +602,8 @@ async def update( job_name=job_name, if_match=if_match, tags=tags, + type=type, details=details, - destination_account_details=destination_account_details, cls=lambda x,y,z: x, **kwargs ) @@ -642,7 +658,7 @@ async def book_shipment_pick_up( error_map.update(kwargs.pop('error_map', {})) _shipment_pick_up_request = models.ShipmentPickUpRequest(start_time=start_time, end_time=end_time, shipment_location=shipment_location) - api_version = "2019-09-01" + api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -674,7 +690,8 @@ async def book_shipment_pick_up( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ShipmentPickUpResponse', pipeline_response) @@ -710,7 +727,7 @@ async def cancel( error_map.update(kwargs.pop('error_map', {})) _cancellation_reason = models.CancellationReason(reason=reason) - api_version = "2019-09-01" + api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -741,7 +758,8 @@ async def cancel( if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -769,7 +787,7 @@ def list_credentials( cls = kwargs.pop('cls', None) # type: ClsType["models.UnencryptedCredentialsList"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2020-04-01" def prepare_request(next_link=None): if not next_link: @@ -810,8 +828,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/src/databox/azext_databox/vendored_sdks/databox/aio/operations_async/_operation_operations_async.py b/src/databox/azext_databox/vendored_sdks/databox/aio/operations_async/_operation_operations_async.py index 8bc5fb793f6..48b54c88d97 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/aio/operations_async/_operation_operations_async.py +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/operations_async/_operation_operations_async.py @@ -55,7 +55,7 @@ def list( cls = kwargs.pop('cls', None) # type: ClsType["models.OperationList"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2020-04-01" def prepare_request(next_link=None): if not next_link: @@ -90,8 +90,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/src/databox/azext_databox/vendored_sdks/databox/aio/operations_async/_service_operations_async.py b/src/databox/azext_databox/vendored_sdks/databox/aio/operations_async/_service_operations_async.py index e1f2b17af7e..16792bba7ac 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/aio/operations_async/_service_operations_async.py +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/operations_async/_service_operations_async.py @@ -41,96 +41,11 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config - def list_available_sku( - self, - location: str, - country: str, - available_sku_request_location: str, - sku_names: Optional[List[Union[str, "models.SkuName"]]] = None, - **kwargs - ) -> AsyncIterable["models.AvailableSkusResult"]: - """This method provides the list of available skus for the given subscription and location. - - :param location: The location of the resource. - :type location: str - :param country: ISO country code. Country for hardware shipment. For codes check: - https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. - :type country: str - :param available_sku_request_location: Location for data transfer. For locations check: - https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. - :type available_sku_request_location: str - :param sku_names: Sku Names to filter for available skus. 
- :type sku_names: list[str or ~data_box_management_client.models.SkuName] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either AvailableSkusResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_box_management_client.models.AvailableSkusResult] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.AvailableSkusResult"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - _available_sku_request = models.AvailableSkuRequest(country=country, location=available_sku_request_location, sku_names=sku_names) - api_version = "2019-09-01" - content_type = "application/json" - transfer_type = "ImportToAzure" - - def prepare_request(next_link=None): - if not next_link: - # Construct URL - url = self.list_available_sku.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'location': self._serialize.url("location", location, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - # Construct and send request - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_available_sku_request, 'AvailableSkuRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, 
header_parameters, **body_content_kwargs) - - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('AvailableSkusResult', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_available_sku.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataBox/locations/{location}/availableSkus'} # type: ignore - def list_available_sku_by_resource_group( self, resource_group_name: str, location: str, + transfer_type: Union[str, "models.TransferType"], country: str, available_sku_request_location: str, sku_names: Optional[List[Union[str, "models.SkuName"]]] = None, @@ -142,6 +57,8 @@ def list_available_sku_by_resource_group( :type resource_group_name: str :param location: The location of the resource. :type location: str + :param transfer_type: Type of the transfer. + :type transfer_type: str or ~data_box_management_client.models.TransferType :param country: ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. 
:type country: str @@ -158,10 +75,9 @@ def list_available_sku_by_resource_group( cls = kwargs.pop('cls', None) # type: ClsType["models.AvailableSkusResult"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _available_sku_request = models.AvailableSkuRequest(country=country, location=available_sku_request_location, sku_names=sku_names) - api_version = "2019-09-01" + _available_sku_request = models.AvailableSkuRequest(transfer_type=transfer_type, country=country, location=available_sku_request_location, sku_names=sku_names) + api_version = "2020-04-01" content_type = "application/json" - transfer_type = "ImportToAzure" def prepare_request(next_link=None): if not next_link: @@ -207,8 +123,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -226,7 +143,7 @@ async def validate_address( preferred_shipment_type: Optional[Union[str, "models.TransportShipmentTypes"]] = None, **kwargs ) -> "models.AddressValidationOutput": - """[DEPRECATED NOTICE: This operation will soon be removed] This method validates the customer shipping address and provide alternate addresses if any. + """[DEPRECATED NOTICE: This operation will soon be removed]. This method validates the customer shipping address and provide alternate addresses if any. :param location: The location of the resource. 
:type location: str @@ -248,7 +165,7 @@ async def validate_address( error_map.update(kwargs.pop('error_map', {})) _validate_address = models.ValidateAddress(validation_type=validation_type, shipping_address=shipping_address, device_type=device_type, preferred_shipment_type=preferred_shipment_type) - api_version = "2019-09-01" + api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -279,7 +196,8 @@ async def validate_address( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('AddressValidationOutput', pipeline_response) @@ -312,7 +230,7 @@ async def validate_input_by_resource_group( cls = kwargs.pop('cls', None) # type: ClsType["models.ValidationResponse"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -344,7 +262,8 @@ async def validate_input_by_resource_group( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ValidationResponse', pipeline_response) @@ -374,7 +293,7 @@ async def validate_input( cls = kwargs.pop('cls', None) # type: ClsType["models.ValidationResponse"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - 
api_version = "2019-09-01" + api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -405,7 +324,8 @@ async def validate_input( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ValidationResponse', pipeline_response) @@ -422,7 +342,7 @@ async def region_configuration( sku_name: Optional[Union[str, "models.SkuName"]] = None, **kwargs ) -> "models.RegionConfigurationResponse": - """This API provides configuration details specific to given region/location. + """This API provides configuration details specific to given region/location at Subscription level. :param location: The location of the resource. :type location: str @@ -441,7 +361,7 @@ async def region_configuration( error_map.update(kwargs.pop('error_map', {})) _region_configuration_request = models.RegionConfigurationRequest(schedule_availability_request=schedule_availability_request, sku_name=sku_name) - api_version = "2019-09-01" + api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -472,7 +392,8 @@ async def region_configuration( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('RegionConfigurationResponse', pipeline_response) @@ -481,3 +402,75 @@ async def region_configuration( return deserialized region_configuration.metadata = {'url': 
'/subscriptions/{subscriptionId}/providers/Microsoft.DataBox/locations/{location}/regionConfiguration'} # type: ignore + + async def region_configuration_by_resource_group( + self, + resource_group_name: str, + location: str, + schedule_availability_request: Optional["models.ScheduleAvailabilityRequest"] = None, + sku_name: Optional[Union[str, "models.SkuName"]] = None, + **kwargs + ) -> "models.RegionConfigurationResponse": + """This API provides configuration details specific to given region/location at Resource group level. + + :param resource_group_name: The Resource Group Name. + :type resource_group_name: str + :param location: The location of the resource. + :type location: str + :param schedule_availability_request: Request body to get the availability for scheduling + orders. + :type schedule_availability_request: ~data_box_management_client.models.ScheduleAvailabilityRequest + :param sku_name: Type of the device. + :type sku_name: str or ~data_box_management_client.models.SkuName + :keyword callable cls: A custom type or function that will be passed the direct response + :return: RegionConfigurationResponse, or the result of cls(response) + :rtype: ~data_box_management_client.models.RegionConfigurationResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.RegionConfigurationResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _region_configuration_request = models.RegionConfigurationRequest(schedule_availability_request=schedule_availability_request, sku_name=sku_name) + api_version = "2020-04-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.region_configuration_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'location': self._serialize.url("location", location, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_region_configuration_request, 'RegionConfigurationRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('RegionConfigurationResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + region_configuration_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBox/locations/{location}/regionConfiguration'} # type: ignore diff --git a/src/databox/azext_databox/vendored_sdks/databox/models/__init__.py b/src/databox/azext_databox/vendored_sdks/databox/models/__init__.py index 3cf355d4852..2911ac32bb7 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/models/__init__.py +++ 
b/src/databox/azext_databox/vendored_sdks/databox/models/__init__.py @@ -8,12 +8,16 @@ try: from ._models_py3 import AccountCredentialDetails + from ._models_py3 import AdditionalErrorInfo from ._models_py3 import AddressValidationOutput from ._models_py3 import AddressValidationProperties + from ._models_py3 import ApiError from ._models_py3 import ApplianceNetworkConfiguration from ._models_py3 import ArmBaseObject from ._models_py3 import AvailableSkuRequest from ._models_py3 import AvailableSkusResult + from ._models_py3 import AzureFileFilterDetails + from ._models_py3 import BlobFilterDetails from ._models_py3 import CancellationReason from ._models_py3 import CloudError from ._models_py3 import ContactDetails @@ -22,6 +26,7 @@ from ._models_py3 import CreateJobValidations from ._models_py3 import CreateOrderLimitForSubscriptionValidationRequest from ._models_py3 import CreateOrderLimitForSubscriptionValidationResponseProperties + from ._models_py3 import DataAccountDetails from ._models_py3 import DataBoxAccountCopyLogDetails from ._models_py3 import DataBoxDiskCopyLogDetails from ._models_py3 import DataBoxDiskCopyProgress @@ -34,25 +39,27 @@ from ._models_py3 import DataBoxJobDetails from ._models_py3 import DataBoxScheduleAvailabilityRequest from ._models_py3 import DataBoxSecret - from ._models_py3 import DataDestinationDetailsValidationRequest - from ._models_py3 import DataDestinationDetailsValidationResponseProperties + from ._models_py3 import DataExportDetails + from ._models_py3 import DataImportDetails + from ._models_py3 import DataLocationToServiceLocationMap + from ._models_py3 import DataTransferDetailsValidationRequest + from ._models_py3 import DataTransferDetailsValidationResponseProperties from ._models_py3 import DataboxJobSecrets from ._models_py3 import DcAccessSecurityCode - from ._models_py3 import DestinationAccountDetails - from ._models_py3 import DestinationManagedDiskDetails - from ._models_py3 import 
DestinationStorageAccountDetails - from ._models_py3 import DestinationToServiceLocationMap + from ._models_py3 import Details from ._models_py3 import DiskScheduleAvailabilityRequest from ._models_py3 import DiskSecret - from ._models_py3 import Error + from ._models_py3 import ErrorDetail + from ._models_py3 import FilterFileDetails from ._models_py3 import HeavyScheduleAvailabilityRequest from ._models_py3 import JobDetails - from ._models_py3 import JobErrorDetails from ._models_py3 import JobResource from ._models_py3 import JobResourceList from ._models_py3 import JobResourceUpdateParameter from ._models_py3 import JobSecrets from ._models_py3 import JobStages + from ._models_py3 import KeyEncryptionKey + from ._models_py3 import ManagedDiskDetails from ._models_py3 import NotificationPreference from ._models_py3 import Operation from ._models_py3 import OperationDisplay @@ -76,8 +83,14 @@ from ._models_py3 import SkuCapacity from ._models_py3 import SkuCost from ._models_py3 import SkuInformation + from ._models_py3 import StorageAccountDetails from ._models_py3 import SubscriptionIsAllowedToCreateJobValidationRequest from ._models_py3 import SubscriptionIsAllowedToCreateJobValidationResponseProperties + from ._models_py3 import TransferAllDetails + from ._models_py3 import TransferConfiguration + from ._models_py3 import TransferConfigurationTransferAllDetails + from ._models_py3 import TransferConfigurationTransferFilterDetails + from ._models_py3 import TransferFilterDetails from ._models_py3 import TransportAvailabilityDetails from ._models_py3 import TransportAvailabilityResponse from ._models_py3 import TransportPreferences @@ -91,12 +104,16 @@ from ._models_py3 import ValidationResponse except (SyntaxError, ImportError): from ._models import AccountCredentialDetails # type: ignore + from ._models import AdditionalErrorInfo # type: ignore from ._models import AddressValidationOutput # type: ignore from ._models import AddressValidationProperties # 
type: ignore + from ._models import ApiError # type: ignore from ._models import ApplianceNetworkConfiguration # type: ignore from ._models import ArmBaseObject # type: ignore from ._models import AvailableSkuRequest # type: ignore from ._models import AvailableSkusResult # type: ignore + from ._models import AzureFileFilterDetails # type: ignore + from ._models import BlobFilterDetails # type: ignore from ._models import CancellationReason # type: ignore from ._models import CloudError # type: ignore from ._models import ContactDetails # type: ignore @@ -105,6 +122,7 @@ from ._models import CreateJobValidations # type: ignore from ._models import CreateOrderLimitForSubscriptionValidationRequest # type: ignore from ._models import CreateOrderLimitForSubscriptionValidationResponseProperties # type: ignore + from ._models import DataAccountDetails # type: ignore from ._models import DataBoxAccountCopyLogDetails # type: ignore from ._models import DataBoxDiskCopyLogDetails # type: ignore from ._models import DataBoxDiskCopyProgress # type: ignore @@ -117,25 +135,27 @@ from ._models import DataBoxJobDetails # type: ignore from ._models import DataBoxScheduleAvailabilityRequest # type: ignore from ._models import DataBoxSecret # type: ignore - from ._models import DataDestinationDetailsValidationRequest # type: ignore - from ._models import DataDestinationDetailsValidationResponseProperties # type: ignore + from ._models import DataExportDetails # type: ignore + from ._models import DataImportDetails # type: ignore + from ._models import DataLocationToServiceLocationMap # type: ignore + from ._models import DataTransferDetailsValidationRequest # type: ignore + from ._models import DataTransferDetailsValidationResponseProperties # type: ignore from ._models import DataboxJobSecrets # type: ignore from ._models import DcAccessSecurityCode # type: ignore - from ._models import DestinationAccountDetails # type: ignore - from ._models import DestinationManagedDiskDetails # 
type: ignore - from ._models import DestinationStorageAccountDetails # type: ignore - from ._models import DestinationToServiceLocationMap # type: ignore + from ._models import Details # type: ignore from ._models import DiskScheduleAvailabilityRequest # type: ignore from ._models import DiskSecret # type: ignore - from ._models import Error # type: ignore + from ._models import ErrorDetail # type: ignore + from ._models import FilterFileDetails # type: ignore from ._models import HeavyScheduleAvailabilityRequest # type: ignore from ._models import JobDetails # type: ignore - from ._models import JobErrorDetails # type: ignore from ._models import JobResource # type: ignore from ._models import JobResourceList # type: ignore from ._models import JobResourceUpdateParameter # type: ignore from ._models import JobSecrets # type: ignore from ._models import JobStages # type: ignore + from ._models import KeyEncryptionKey # type: ignore + from ._models import ManagedDiskDetails # type: ignore from ._models import NotificationPreference # type: ignore from ._models import Operation # type: ignore from ._models import OperationDisplay # type: ignore @@ -159,8 +179,14 @@ from ._models import SkuCapacity # type: ignore from ._models import SkuCost # type: ignore from ._models import SkuInformation # type: ignore + from ._models import StorageAccountDetails # type: ignore from ._models import SubscriptionIsAllowedToCreateJobValidationRequest # type: ignore from ._models import SubscriptionIsAllowedToCreateJobValidationResponseProperties # type: ignore + from ._models import TransferAllDetails # type: ignore + from ._models import TransferConfiguration # type: ignore + from ._models import TransferConfigurationTransferAllDetails # type: ignore + from ._models import TransferConfigurationTransferFilterDetails # type: ignore + from ._models import TransferFilterDetails # type: ignore from ._models import TransportAvailabilityDetails # type: ignore from ._models import 
TransportAvailabilityResponse # type: ignore from ._models import TransportPreferences # type: ignore @@ -179,8 +205,11 @@ AddressValidationStatus, ClassDiscriminator, CopyStatus, - DataDestinationType, + DataAccountType, + FilterFileType, JobDeliveryType, + KekType, + LogCollectionLevel, NotificationStageName, OverallValidationStatus, ShareDestinationFormatType, @@ -188,6 +217,8 @@ SkuName, StageName, StageStatus, + TransferConfigurationType, + TransferType, TransportShipmentTypes, ValidationInputDiscriminator, ValidationStatus, @@ -195,12 +226,16 @@ __all__ = [ 'AccountCredentialDetails', + 'AdditionalErrorInfo', 'AddressValidationOutput', 'AddressValidationProperties', + 'ApiError', 'ApplianceNetworkConfiguration', 'ArmBaseObject', 'AvailableSkuRequest', 'AvailableSkusResult', + 'AzureFileFilterDetails', + 'BlobFilterDetails', 'CancellationReason', 'CloudError', 'ContactDetails', @@ -209,6 +244,7 @@ 'CreateJobValidations', 'CreateOrderLimitForSubscriptionValidationRequest', 'CreateOrderLimitForSubscriptionValidationResponseProperties', + 'DataAccountDetails', 'DataBoxAccountCopyLogDetails', 'DataBoxDiskCopyLogDetails', 'DataBoxDiskCopyProgress', @@ -221,25 +257,27 @@ 'DataBoxJobDetails', 'DataBoxScheduleAvailabilityRequest', 'DataBoxSecret', - 'DataDestinationDetailsValidationRequest', - 'DataDestinationDetailsValidationResponseProperties', + 'DataExportDetails', + 'DataImportDetails', + 'DataLocationToServiceLocationMap', + 'DataTransferDetailsValidationRequest', + 'DataTransferDetailsValidationResponseProperties', 'DataboxJobSecrets', 'DcAccessSecurityCode', - 'DestinationAccountDetails', - 'DestinationManagedDiskDetails', - 'DestinationStorageAccountDetails', - 'DestinationToServiceLocationMap', + 'Details', 'DiskScheduleAvailabilityRequest', 'DiskSecret', - 'Error', + 'ErrorDetail', + 'FilterFileDetails', 'HeavyScheduleAvailabilityRequest', 'JobDetails', - 'JobErrorDetails', 'JobResource', 'JobResourceList', 'JobResourceUpdateParameter', 'JobSecrets', 
'JobStages', + 'KeyEncryptionKey', + 'ManagedDiskDetails', 'NotificationPreference', 'Operation', 'OperationDisplay', @@ -263,8 +301,14 @@ 'SkuCapacity', 'SkuCost', 'SkuInformation', + 'StorageAccountDetails', 'SubscriptionIsAllowedToCreateJobValidationRequest', 'SubscriptionIsAllowedToCreateJobValidationResponseProperties', + 'TransferAllDetails', + 'TransferConfiguration', + 'TransferConfigurationTransferAllDetails', + 'TransferConfigurationTransferFilterDetails', + 'TransferFilterDetails', 'TransportAvailabilityDetails', 'TransportAvailabilityResponse', 'TransportPreferences', @@ -281,8 +325,11 @@ 'AddressValidationStatus', 'ClassDiscriminator', 'CopyStatus', - 'DataDestinationType', + 'DataAccountType', + 'FilterFileType', 'JobDeliveryType', + 'KekType', + 'LogCollectionLevel', 'NotificationStageName', 'OverallValidationStatus', 'ShareDestinationFormatType', @@ -290,6 +337,8 @@ 'SkuName', 'StageName', 'StageStatus', + 'TransferConfigurationType', + 'TransferType', 'TransportShipmentTypes', 'ValidationInputDiscriminator', 'ValidationStatus', diff --git a/src/databox/azext_databox/vendored_sdks/databox/models/_data_box_management_client_enums.py b/src/databox/azext_databox/vendored_sdks/databox/models/_data_box_management_client_enums.py index d7d118fdcb5..a37109dacbc 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/models/_data_box_management_client_enums.py +++ b/src/databox/azext_databox/vendored_sdks/databox/models/_data_box_management_client_enums.py @@ -33,9 +33,9 @@ class ClassDiscriminator(str, Enum): """Indicates the type of job details. """ - data_box = "DataBox" #: Databox orders. - data_box_disk = "DataBoxDisk" #: DataboxDisk orders. - data_box_heavy = "DataBoxHeavy" #: DataboxHeavy orders. + data_box = "DataBox" #: Data Box orders. + data_box_disk = "DataBoxDisk" #: Data Box Disk orders. + data_box_heavy = "DataBoxHeavy" #: Data Box Heavy orders. 
class CopyStatus(str, Enum): """The Status of the copy @@ -53,13 +53,20 @@ class CopyStatus(str, Enum): storage_account_not_accessible = "StorageAccountNotAccessible" #: Data copy failed. Storage Account was not accessible during copy. unsupported_data = "UnsupportedData" #: Data copy failed. The Device data content is not supported. -class DataDestinationType(str, Enum): - """Data Destination Type. +class DataAccountType(str, Enum): + """Type of the account. """ storage_account = "StorageAccount" #: Storage Accounts . managed_disk = "ManagedDisk" #: Azure Managed disk storage. +class FilterFileType(str, Enum): + """Type of the filter file. + """ + + azure_blob = "AzureBlob" #: Filter file is of the type AzureBlob. + azure_file = "AzureFile" #: Filter file is of the type AzureFiles. + class JobDeliveryType(str, Enum): """Delivery type of Job. """ @@ -67,6 +74,20 @@ class JobDeliveryType(str, Enum): non_scheduled = "NonScheduled" #: Non Scheduled job. scheduled = "Scheduled" #: Scheduled job. +class KekType(str, Enum): + """Type of encryption key used for key encryption. + """ + + microsoft_managed = "MicrosoftManaged" #: Key encryption key is managed by Microsoft. + customer_managed = "CustomerManaged" #: Key encryption key is managed by the Customer. + +class LogCollectionLevel(str, Enum): + """Level of the logs to be collected. + """ + + error = "Error" #: Only Errors will be collected in the logs. + verbose = "Verbose" #: Verbose logging (includes Errors, CRC, size information and others). + class NotificationStageName(str, Enum): """Name of the stage. """ @@ -75,7 +96,7 @@ class NotificationStageName(str, Enum): dispatched = "Dispatched" #: Notification at device dispatched stage. delivered = "Delivered" #: Notification at device delivered stage. picked_up = "PickedUp" #: Notification at device picked up from user stage. - at_azure_dc = "AtAzureDC" #: Notification at device received at azure datacenter stage. 
+ at_azure_dc = "AtAzureDC" #: Notification at device received at Azure datacenter stage. data_copy = "DataCopy" #: Notification at data copy started stage. class OverallValidationStatus(str, Enum): @@ -110,9 +131,9 @@ class SkuDisabledReason(str, Enum): class SkuName(str, Enum): - data_box = "DataBox" #: Databox. - data_box_disk = "DataBoxDisk" #: DataboxDisk. - data_box_heavy = "DataBoxHeavy" #: DataboxHeavy. + data_box = "DataBox" #: Data Box. + data_box_disk = "DataBoxDisk" #: Data Box Disk. + data_box_heavy = "DataBoxHeavy" #: Data Box Heavy. class StageName(str, Enum): """Name of the stage which is in progress. @@ -122,14 +143,14 @@ class StageName(str, Enum): device_prepared = "DevicePrepared" #: A device has been prepared for the order. dispatched = "Dispatched" #: Device has been dispatched to the user of the order. delivered = "Delivered" #: Device has been delivered to the user of the order. - picked_up = "PickedUp" #: Device has been picked up from user and in transit to azure datacenter. - at_azure_dc = "AtAzureDC" #: Device has been received at azure datacenter from the user. - data_copy = "DataCopy" #: Data copy from the device at azure datacenter. + picked_up = "PickedUp" #: Device has been picked up from user and in transit to Azure datacenter. + at_azure_dc = "AtAzureDC" #: Device has been received at Azure datacenter from the user. + data_copy = "DataCopy" #: Data copy from the device at Azure datacenter. completed = "Completed" #: Order has completed. completed_with_errors = "CompletedWithErrors" #: Order has completed with errors. cancelled = "Cancelled" #: Order has been cancelled. failed_issue_reported_at_customer = "Failed_IssueReportedAtCustomer" #: Order has failed due to issue reported by user. - failed_issue_detected_at_azure_dc = "Failed_IssueDetectedAtAzureDC" #: Order has failed due to issue detected at azure datacenter. 
+ failed_issue_detected_at_azure_dc = "Failed_IssueDetectedAtAzureDC" #: Order has failed due to issue detected at Azure datacenter. aborted = "Aborted" #: Order has been aborted. completed_with_warnings = "CompletedWithWarnings" #: Order has completed with warnings. ready_to_dispatch_from_azure_dc = "ReadyToDispatchFromAzureDC" #: Device is ready to be handed to customer from Azure DC. @@ -146,6 +167,22 @@ class StageStatus(str, Enum): cancelled = "Cancelled" #: Stage has been cancelled. cancelling = "Cancelling" #: Stage is cancelling. succeeded_with_errors = "SucceededWithErrors" #: Stage has succeeded with errors. + waiting_for_customer_action = "WaitingForCustomerAction" #: Stage is stuck until customer takes some action. + succeeded_with_warnings = "SucceededWithWarnings" #: Stage has succeeded with warnings. + +class TransferConfigurationType(str, Enum): + """Type of the configuration for transfer. + """ + + transfer_all = "TransferAll" #: Transfer all the data. + transfer_using_filter = "TransferUsingFilter" #: Transfer using filter. + +class TransferType(str, Enum): + """Type of the transfer. + """ + + import_to_azure = "ImportToAzure" #: Import data to azure. + export_from_azure = "ExportFromAzure" #: Export data from azure. class TransportShipmentTypes(str, Enum): """Transport Shipment Type supported for given region. @@ -159,11 +196,11 @@ class ValidationInputDiscriminator(str, Enum): """ validate_address = "ValidateAddress" #: Identify request and response of address validation. - validate_data_destination_details = "ValidateDataDestinationDetails" #: Identify request and response of data destination details validation. validate_subscription_is_allowed_to_create_job = "ValidateSubscriptionIsAllowedToCreateJob" #: Identify request and response for validation of subscription permission to create job. validate_preferences = "ValidatePreferences" #: Identify request and response of preference validation. 
validate_create_order_limit = "ValidateCreateOrderLimit" #: Identify request and response of create order limit for subscription validation. validate_sku_availability = "ValidateSkuAvailability" #: Identify request and response of active job limit for sku availability. + validate_data_transfer_details = "ValidateDataTransferDetails" #: Identify request and response of data transfer details validation. class ValidationStatus(str, Enum): """Create order limit validation status. diff --git a/src/databox/azext_databox/vendored_sdks/databox/models/_models.py b/src/databox/azext_databox/vendored_sdks/databox/models/_models.py index 10c4cbf8886..451cdbe27dc 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/models/_models.py +++ b/src/databox/azext_databox/vendored_sdks/databox/models/_models.py @@ -6,6 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from azure.core.exceptions import HttpResponseError import msrest.serialization @@ -16,9 +17,9 @@ class AccountCredentialDetails(msrest.serialization.Model): :ivar account_name: Name of the account. :vartype account_name: str - :ivar data_destination_type: Data Destination Type. Possible values include: "StorageAccount", + :ivar data_account_type: Type of the account. Possible values include: "StorageAccount", "ManagedDisk". - :vartype data_destination_type: str or ~data_box_management_client.models.DataDestinationType + :vartype data_account_type: str or ~data_box_management_client.models.DataAccountType :ivar account_connection_string: Connection string of the account endpoint to use the account as a storage endpoint on the device. 
:vartype account_connection_string: str @@ -29,14 +30,14 @@ class AccountCredentialDetails(msrest.serialization.Model): _validation = { 'account_name': {'readonly': True}, - 'data_destination_type': {'readonly': True}, + 'data_account_type': {'readonly': True}, 'account_connection_string': {'readonly': True}, 'share_credential_details': {'readonly': True}, } _attribute_map = { 'account_name': {'key': 'accountName', 'type': 'str'}, - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, 'account_connection_string': {'key': 'accountConnectionString', 'type': 'str'}, 'share_credential_details': {'key': 'shareCredentialDetails', 'type': '[ShareCredentialDetails]'}, } @@ -47,23 +48,46 @@ def __init__( ): super(AccountCredentialDetails, self).__init__(**kwargs) self.account_name = None - self.data_destination_type = None + self.data_account_type = None self.account_connection_string = None self.share_credential_details = None +class AdditionalErrorInfo(msrest.serialization.Model): + """Additional error info. + + :param type: Additional error type. + :type type: str + :param info: Additional error info. + :type info: object + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'info': {'key': 'info', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AdditionalErrorInfo, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.info = kwargs.get('info', None) + + class AddressValidationOutput(msrest.serialization.Model): """Output of the address validation api. Variables are only populated by the server, and will be ignored when sending a request. :param validation_type: Identifies the type of validation response.Constant filled by server. 
- Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", + "ValidatePreferences", "ValidateCreateOrderLimit", "ValidateSkuAvailability", + "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar validation_status: The address validation status. Possible values include: "Valid", "Invalid", "Ambiguous". :vartype validation_status: str or ~data_box_management_client.models.AddressValidationStatus @@ -79,7 +103,7 @@ class AddressValidationOutput(msrest.serialization.Model): _attribute_map = { 'validation_type': {'key': 'properties.validationType', 'type': 'str'}, - 'error': {'key': 'properties.error', 'type': 'Error'}, + 'error': {'key': 'properties.error', 'type': 'CloudError'}, 'validation_status': {'key': 'properties.validationStatus', 'type': 'str'}, 'alternate_addresses': {'key': 'properties.alternateAddresses', 'type': '[ShippingAddress]'}, } @@ -99,19 +123,19 @@ class ValidationInputResponse(msrest.serialization.Model): """Minimum properties that should be present in each individual validation response. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AddressValidationProperties, CreateOrderLimitForSubscriptionValidationResponseProperties, DataDestinationDetailsValidationResponseProperties, PreferencesValidationResponseProperties, SkuAvailabilityValidationResponseProperties, SubscriptionIsAllowedToCreateJobValidationResponseProperties. 
+ sub-classes are: AddressValidationProperties, CreateOrderLimitForSubscriptionValidationResponseProperties, DataTransferDetailsValidationResponseProperties, PreferencesValidationResponseProperties, SkuAvailabilityValidationResponseProperties, SubscriptionIsAllowedToCreateJobValidationResponseProperties. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. 
- :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError """ _validation = { @@ -121,11 +145,11 @@ class ValidationInputResponse(msrest.serialization.Model): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, } _subtype_map = { - 'validation_type': {'ValidateAddress': 'AddressValidationProperties', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationResponseProperties', 'ValidateDataDestinationDetails': 'DataDestinationDetailsValidationResponseProperties', 'ValidatePreferences': 'PreferencesValidationResponseProperties', 'ValidateSkuAvailability': 'SkuAvailabilityValidationResponseProperties', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationResponseProperties'} + 'validation_type': {'ValidateAddress': 'AddressValidationProperties', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationResponseProperties', 'ValidateDataTransferDetails': 'DataTransferDetailsValidationResponseProperties', 'ValidatePreferences': 'PreferencesValidationResponseProperties', 'ValidateSkuAvailability': 'SkuAvailabilityValidationResponseProperties', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationResponseProperties'} } def __init__( @@ -145,12 +169,12 @@ class AddressValidationProperties(ValidationInputResponse): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". 
+ "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar validation_status: The address validation status. Possible values include: "Valid", "Invalid", "Ambiguous". :vartype validation_status: str or ~data_box_management_client.models.AddressValidationStatus @@ -167,7 +191,7 @@ class AddressValidationProperties(ValidationInputResponse): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'validation_status': {'key': 'validationStatus', 'type': 'str'}, 'alternate_addresses': {'key': 'alternateAddresses', 'type': '[ShippingAddress]'}, } @@ -182,6 +206,31 @@ def __init__( self.alternate_addresses = None +class ApiError(msrest.serialization.Model): + """ApiError. + + All required parameters must be populated in order to send to Azure. + + :param error: Required. + :type error: ~data_box_management_client.models.ErrorDetail + """ + + _validation = { + 'error': {'required': True}, + } + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorDetail'}, + } + + def __init__( + self, + **kwargs + ): + super(ApiError, self).__init__(**kwargs) + self.error = kwargs['error'] + + class ApplianceNetworkConfiguration(msrest.serialization.Model): """The Network Adapter configuration of a DataBox. @@ -250,12 +299,11 @@ def __init__( class AvailableSkuRequest(msrest.serialization.Model): """The filters for showing the available skus. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar transfer_type: Required. Type of the transfer. 
Default value: "ImportToAzure". - :vartype transfer_type: str + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType :param country: Required. ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. :type country: str @@ -267,7 +315,7 @@ class AvailableSkuRequest(msrest.serialization.Model): """ _validation = { - 'transfer_type': {'required': True, 'constant': True}, + 'transfer_type': {'required': True}, 'country': {'required': True}, 'location': {'required': True}, } @@ -279,13 +327,12 @@ class AvailableSkuRequest(msrest.serialization.Model): 'sku_names': {'key': 'skuNames', 'type': '[str]'}, } - transfer_type = "ImportToAzure" - def __init__( self, **kwargs ): super(AvailableSkuRequest, self).__init__(**kwargs) + self.transfer_type = kwargs['transfer_type'] self.country = kwargs['country'] self.location = kwargs['location'] self.sku_names = kwargs.get('sku_names', None) @@ -320,6 +367,60 @@ def __init__( self.next_link = kwargs.get('next_link', None) +class AzureFileFilterDetails(msrest.serialization.Model): + """Filter details to transfer Azure files. + + :param file_prefix_list: Prefix list of the Azure files to be transferred. + :type file_prefix_list: list[str] + :param file_path_list: List of full path of the files to be transferred. + :type file_path_list: list[str] + :param file_share_list: List of file shares to be transferred. 
+ :type file_share_list: list[str] + """ + + _attribute_map = { + 'file_prefix_list': {'key': 'filePrefixList', 'type': '[str]'}, + 'file_path_list': {'key': 'filePathList', 'type': '[str]'}, + 'file_share_list': {'key': 'fileShareList', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureFileFilterDetails, self).__init__(**kwargs) + self.file_prefix_list = kwargs.get('file_prefix_list', None) + self.file_path_list = kwargs.get('file_path_list', None) + self.file_share_list = kwargs.get('file_share_list', None) + + +class BlobFilterDetails(msrest.serialization.Model): + """Filter details to transfer Azure Blobs. + + :param blob_prefix_list: Prefix list of the Azure blobs to be transferred. + :type blob_prefix_list: list[str] + :param blob_path_list: List of full path of the blobs to be transferred. + :type blob_path_list: list[str] + :param container_list: List of blob containers to be transferred. + :type container_list: list[str] + """ + + _attribute_map = { + 'blob_prefix_list': {'key': 'blobPrefixList', 'type': '[str]'}, + 'blob_path_list': {'key': 'blobPathList', 'type': '[str]'}, + 'container_list': {'key': 'containerList', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobFilterDetails, self).__init__(**kwargs) + self.blob_prefix_list = kwargs.get('blob_prefix_list', None) + self.blob_path_list = kwargs.get('blob_path_list', None) + self.container_list = kwargs.get('container_list', None) + + class CancellationReason(msrest.serialization.Model): """Reason for cancellation. @@ -346,23 +447,25 @@ def __init__( class CloudError(msrest.serialization.Model): - """The error information object. + """Cloud error. Variables are only populated by the server, and will be ignored when sending a request. - :ivar code: Error code string. - :vartype code: str - :ivar message: Descriptive error information. - :vartype message: str - :param target: Error target. + :param code: Cloud error code. 
+ :type code: str + :param message: Cloud error message. + :type message: str + :param target: Cloud error target. :type target: str - :param details: More detailed error information. - :type details: list[~data_box_management_client.models.CloudError] + :ivar details: Cloud error details. + :vartype details: list[~data_box_management_client.models.CloudError] + :ivar additional_info: Cloud error additional info. + :vartype additional_info: list[~data_box_management_client.models.AdditionalErrorInfo] """ _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, + 'details': {'readonly': True}, + 'additional_info': {'readonly': True}, } _attribute_map = { @@ -370,6 +473,7 @@ class CloudError(msrest.serialization.Model): 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, 'details': {'key': 'details', 'type': '[CloudError]'}, + 'additional_info': {'key': 'additionalInfo', 'type': '[AdditionalErrorInfo]'}, } def __init__( @@ -377,10 +481,11 @@ def __init__( **kwargs ): super(CloudError, self).__init__(**kwargs) - self.code = None - self.message = None + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) self.target = kwargs.get('target', None) - self.details = kwargs.get('details', None) + self.details = None + self.additional_info = None class ContactDetails(msrest.serialization.Model): @@ -468,20 +573,24 @@ class CopyProgress(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar storage_account_name: Name of the storage account where the data needs to be uploaded. + :ivar storage_account_name: Name of the storage account. This will be empty for data account + types other than storage account. :vartype storage_account_name: str - :ivar data_destination_type: Data Destination Type. Possible values include: "StorageAccount", + :ivar transfer_type: Transfer type of data. 
Possible values include: "ImportToAzure", + "ExportFromAzure". + :vartype transfer_type: str or ~data_box_management_client.models.TransferType + :ivar data_account_type: Data Account Type. Possible values include: "StorageAccount", "ManagedDisk". - :vartype data_destination_type: str or ~data_box_management_client.models.DataDestinationType + :vartype data_account_type: str or ~data_box_management_client.models.DataAccountType :ivar account_id: Id of the account where the data needs to be uploaded. :vartype account_id: str - :ivar bytes_sent_to_cloud: Amount of data uploaded by the job as of now. - :vartype bytes_sent_to_cloud: long + :ivar bytes_processed: To indicate bytes transferred. + :vartype bytes_processed: long :ivar total_bytes_to_process: Total amount of data to be processed by the job. :vartype total_bytes_to_process: long - :ivar files_processed: Number of files processed by the job as of now. + :ivar files_processed: Number of files processed. :vartype files_processed: long - :ivar total_files_to_process: Total number of files to be processed by the job. + :ivar total_files_to_process: Total files to process. :vartype total_files_to_process: long :ivar invalid_files_processed: Number of files not adhering to azure naming conventions which were processed by automatic renaming. @@ -494,13 +603,21 @@ class CopyProgress(msrest.serialization.Model): :vartype renamed_container_count: long :ivar files_errored_out: Number of files which could not be copied. :vartype files_errored_out: long + :ivar directories_errored_out: To indicate directories errored out in the job. + :vartype directories_errored_out: long + :ivar invalid_directories_processed: To indicate directories renamed. + :vartype invalid_directories_processed: long + :ivar is_enumeration_in_progress: To indicate if enumeration of data is in progress. + Until this is true, the TotalBytesToProcess may not be valid. 
+ :vartype is_enumeration_in_progress: bool """ _validation = { 'storage_account_name': {'readonly': True}, - 'data_destination_type': {'readonly': True}, + 'transfer_type': {'readonly': True}, + 'data_account_type': {'readonly': True}, 'account_id': {'readonly': True}, - 'bytes_sent_to_cloud': {'readonly': True}, + 'bytes_processed': {'readonly': True}, 'total_bytes_to_process': {'readonly': True}, 'files_processed': {'readonly': True}, 'total_files_to_process': {'readonly': True}, @@ -508,13 +625,17 @@ class CopyProgress(msrest.serialization.Model): 'invalid_file_bytes_uploaded': {'readonly': True}, 'renamed_container_count': {'readonly': True}, 'files_errored_out': {'readonly': True}, + 'directories_errored_out': {'readonly': True}, + 'invalid_directories_processed': {'readonly': True}, + 'is_enumeration_in_progress': {'readonly': True}, } _attribute_map = { 'storage_account_name': {'key': 'storageAccountName', 'type': 'str'}, - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, + 'transfer_type': {'key': 'transferType', 'type': 'str'}, + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, 'account_id': {'key': 'accountId', 'type': 'str'}, - 'bytes_sent_to_cloud': {'key': 'bytesSentToCloud', 'type': 'long'}, + 'bytes_processed': {'key': 'bytesProcessed', 'type': 'long'}, 'total_bytes_to_process': {'key': 'totalBytesToProcess', 'type': 'long'}, 'files_processed': {'key': 'filesProcessed', 'type': 'long'}, 'total_files_to_process': {'key': 'totalFilesToProcess', 'type': 'long'}, @@ -522,6 +643,9 @@ class CopyProgress(msrest.serialization.Model): 'invalid_file_bytes_uploaded': {'key': 'invalidFileBytesUploaded', 'type': 'long'}, 'renamed_container_count': {'key': 'renamedContainerCount', 'type': 'long'}, 'files_errored_out': {'key': 'filesErroredOut', 'type': 'long'}, + 'directories_errored_out': {'key': 'directoriesErroredOut', 'type': 'long'}, + 'invalid_directories_processed': {'key': 'invalidDirectoriesProcessed', 'type': 
'long'}, + 'is_enumeration_in_progress': {'key': 'isEnumerationInProgress', 'type': 'bool'}, } def __init__( @@ -530,9 +654,10 @@ def __init__( ): super(CopyProgress, self).__init__(**kwargs) self.storage_account_name = None - self.data_destination_type = None + self.transfer_type = None + self.data_account_type = None self.account_id = None - self.bytes_sent_to_cloud = None + self.bytes_processed = None self.total_bytes_to_process = None self.files_processed = None self.total_files_to_process = None @@ -540,33 +665,36 @@ def __init__( self.invalid_file_bytes_uploaded = None self.renamed_container_count = None self.files_errored_out = None + self.directories_errored_out = None + self.invalid_directories_processed = None + self.is_enumeration_in_progress = None class ValidationRequest(msrest.serialization.Model): - """Input request for all pre job creation validation. + """Minimum request requirement of any validation category. You probably want to use the sub-classes and not this class directly. Known sub-classes are: CreateJobValidations. All required parameters must be populated in order to send to Azure. + :param validation_category: Required. Identify the nature of validation.Constant filled by + server. + :type validation_category: str :param individual_request_details: Required. List of request details contain validationType and its request as key and value respectively. :type individual_request_details: list[~data_box_management_client.models.ValidationInputRequest] - :param validation_category: Required. Identify the nature of validation.Constant filled by - server. 
- :type validation_category: str """ _validation = { - 'individual_request_details': {'required': True}, 'validation_category': {'required': True}, + 'individual_request_details': {'required': True}, } _attribute_map = { - 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, 'validation_category': {'key': 'validationCategory', 'type': 'str'}, + 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, } _subtype_map = { @@ -578,8 +706,8 @@ def __init__( **kwargs ): super(ValidationRequest, self).__init__(**kwargs) - self.individual_request_details = kwargs['individual_request_details'] self.validation_category = None + self.individual_request_details = kwargs['individual_request_details'] class CreateJobValidations(ValidationRequest): @@ -587,23 +715,23 @@ class CreateJobValidations(ValidationRequest): All required parameters must be populated in order to send to Azure. + :param validation_category: Required. Identify the nature of validation.Constant filled by + server. + :type validation_category: str :param individual_request_details: Required. List of request details contain validationType and its request as key and value respectively. :type individual_request_details: list[~data_box_management_client.models.ValidationInputRequest] - :param validation_category: Required. Identify the nature of validation.Constant filled by - server. 
- :type validation_category: str """ _validation = { - 'individual_request_details': {'required': True}, 'validation_category': {'required': True}, + 'individual_request_details': {'required': True}, } _attribute_map = { - 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, 'validation_category': {'key': 'validationCategory', 'type': 'str'}, + 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, } def __init__( @@ -618,14 +746,14 @@ class ValidationInputRequest(msrest.serialization.Model): """Minimum fields that must be present in any type of validation request. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ValidateAddress, CreateOrderLimitForSubscriptionValidationRequest, DataDestinationDetailsValidationRequest, PreferencesValidationRequest, SkuAvailabilityValidationRequest, SubscriptionIsAllowedToCreateJobValidationRequest. + sub-classes are: ValidateAddress, CreateOrderLimitForSubscriptionValidationRequest, DataTransferDetailsValidationRequest, PreferencesValidationRequest, SkuAvailabilityValidationRequest, SubscriptionIsAllowedToCreateJobValidationRequest. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". 
:type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator """ @@ -638,7 +766,7 @@ class ValidationInputRequest(msrest.serialization.Model): } _subtype_map = { - 'validation_type': {'ValidateAddress': 'ValidateAddress', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationRequest', 'ValidateDataDestinationDetails': 'DataDestinationDetailsValidationRequest', 'ValidatePreferences': 'PreferencesValidationRequest', 'ValidateSkuAvailability': 'SkuAvailabilityValidationRequest', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationRequest'} + 'validation_type': {'ValidateAddress': 'ValidateAddress', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationRequest', 'ValidateDataTransferDetails': 'DataTransferDetailsValidationRequest', 'ValidatePreferences': 'PreferencesValidationRequest', 'ValidateSkuAvailability': 'SkuAvailabilityValidationRequest', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationRequest'} } def __init__( @@ -655,9 +783,9 @@ class CreateOrderLimitForSubscriptionValidationRequest(ValidationInputRequest): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :param device_type: Required. Device type to be used for the job. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". 
@@ -691,12 +819,12 @@ class CreateOrderLimitForSubscriptionValidationResponseProperties(ValidationInpu All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Create order limit validation status. Possible values include: "Valid", "Invalid", "Skipped". :vartype status: str or ~data_box_management_client.models.ValidationStatus @@ -710,7 +838,7 @@ class CreateOrderLimitForSubscriptionValidationResponseProperties(ValidationInpu _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -723,6 +851,48 @@ def __init__( self.status = None +class DataAccountDetails(msrest.serialization.Model): + """Account details of the data to be transferred. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ManagedDiskDetails, StorageAccountDetails. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". 
+ :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + } + + _subtype_map = { + 'data_account_type': {'ManagedDisk': 'ManagedDiskDetails', 'StorageAccount': 'StorageAccountDetails'} + } + + def __init__( + self, + **kwargs + ): + super(DataAccountDetails, self).__init__(**kwargs) + self.data_account_type = None + self.share_password = kwargs.get('share_password', None) + + class DataBoxAccountCopyLogDetails(CopyLogDetails): """Copy log details for a storage account of a DataBox job. @@ -733,22 +903,27 @@ class DataBoxAccountCopyLogDetails(CopyLogDetails): :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator - :ivar account_name: Destination account name. + :ivar account_name: Account name. :vartype account_name: str :ivar copy_log_link: Link for copy logs. :vartype copy_log_link: str + :ivar copy_verbose_log_link: Link for copy verbose logs. This will be set only when + LogCollectionLevel is set to Verbose. 
+ :vartype copy_verbose_log_link: str """ _validation = { 'copy_log_details_type': {'required': True}, 'account_name': {'readonly': True}, 'copy_log_link': {'readonly': True}, + 'copy_verbose_log_link': {'readonly': True}, } _attribute_map = { 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, 'account_name': {'key': 'accountName', 'type': 'str'}, 'copy_log_link': {'key': 'copyLogLink', 'type': 'str'}, + 'copy_verbose_log_link': {'key': 'copyVerboseLogLink', 'type': 'str'}, } def __init__( @@ -759,6 +934,7 @@ def __init__( self.copy_log_details_type = 'DataBox' self.account_name = None self.copy_log_link = None + self.copy_verbose_log_link = None class DataBoxDiskCopyLogDetails(CopyLogDetails): @@ -856,24 +1032,20 @@ class JobDetails(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. :type contact_details: ~data_box_management_client.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. + :param shipping_address: Shipping address of the customer. :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. :vartype return_package: ~data_box_management_client.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. 
- :type destination_account_details: - list[~data_box_management_client.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~data_box_management_client.models.JobErrorDetails] + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator @@ -885,36 +1057,40 @@ class JobDetails(msrest.serialization.Model): :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :ivar key_encryption_key: Details about which key encryption type is being used. + :vartype key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_terabytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. 
+ :type expected_data_size_in_terabytes: int """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'key_encryption_key': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, } _subtype_map = { @@ -926,19 +1102,20 @@ def 
__init__( **kwargs ): super(JobDetails, self).__init__(**kwargs) - self.expected_data_size_in_terabytes = kwargs.get('expected_data_size_in_terabytes', None) self.job_stages = None self.contact_details = kwargs['contact_details'] - self.shipping_address = kwargs['shipping_address'] + self.shipping_address = kwargs.get('shipping_address', None) self.delivery_package = None self.return_package = None - self.destination_account_details = kwargs['destination_account_details'] - self.error_details = None + self.data_import_details = kwargs.get('data_import_details', None) + self.data_export_details = kwargs.get('data_export_details', None) self.job_details_type = None self.preferences = kwargs.get('preferences', None) self.copy_log_details = None self.reverse_shipment_label_sas_key = None self.chain_of_custody_sas_key = None + self.key_encryption_key = None + self.expected_data_size_in_terabytes = kwargs.get('expected_data_size_in_terabytes', None) class DataBoxDiskJobDetails(JobDetails): @@ -948,24 +1125,20 @@ class DataBoxDiskJobDetails(JobDetails): All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. :type contact_details: ~data_box_management_client.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. + :param shipping_address: Shipping address of the customer. :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. 
:vartype return_package: ~data_box_management_client.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: - list[~data_box_management_client.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~data_box_management_client.models.JobErrorDetails] + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator @@ -977,6 +1150,11 @@ class DataBoxDiskJobDetails(JobDetails): :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :ivar key_encryption_key: Details about which key encryption type is being used. + :vartype key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_terabytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_terabytes: int :param preferred_disks: User preference on what size disks are needed for the job. The map is from the disk size in TB to the count. Eg. {2,5} means 5 disks of 2 TB size. Key is string but will be checked against an int. 
@@ -993,33 +1171,32 @@ class DataBoxDiskJobDetails(JobDetails): _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'key_encryption_key': {'readonly': True}, 'copy_progress': {'readonly': True}, 'disks_and_size_details': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_terabytes': {'key': 
'expectedDataSizeInTerabytes', 'type': 'int'}, 'preferred_disks': {'key': 'preferredDisks', 'type': '{int}'}, 'copy_progress': {'key': 'copyProgress', 'type': '[DataBoxDiskCopyProgress]'}, 'disks_and_size_details': {'key': 'disksAndSizeDetails', 'type': '{int}'}, @@ -1044,22 +1221,29 @@ class JobSecrets(msrest.serialization.Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: DataboxJobSecrets, DataBoxDiskJobSecrets, DataBoxHeavyJobSecrets. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. 
+ :vartype error: ~data_box_management_client.models.CloudError """ _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, } _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, } _subtype_map = { @@ -1072,7 +1256,8 @@ def __init__( ): super(JobSecrets, self).__init__(**kwargs) self.job_secrets_type = None - self.dc_access_security_code = kwargs.get('dc_access_security_code', None) + self.dc_access_security_code = None + self.error = None class DataBoxDiskJobSecrets(JobSecrets): @@ -1085,8 +1270,10 @@ class DataBoxDiskJobSecrets(JobSecrets): :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError :ivar disk_secrets: Contains the list of secrets object for that device. :vartype disk_secrets: list[~data_box_management_client.models.DiskSecret] :ivar pass_key: PassKey for the disk Job. 
@@ -1097,6 +1284,8 @@ class DataBoxDiskJobSecrets(JobSecrets): _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, 'disk_secrets': {'readonly': True}, 'pass_key': {'readonly': True}, 'is_passkey_user_defined': {'readonly': True}, @@ -1105,6 +1294,7 @@ class DataBoxDiskJobSecrets(JobSecrets): _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'disk_secrets': {'key': 'diskSecrets', 'type': '[DiskSecret]'}, 'pass_key': {'key': 'passKey', 'type': 'str'}, 'is_passkey_user_defined': {'key': 'isPasskeyUserDefined', 'type': 'bool'}, @@ -1131,22 +1321,27 @@ class DataBoxHeavyAccountCopyLogDetails(CopyLogDetails): :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator - :ivar account_name: Destination account name. + :ivar account_name: Account name. :vartype account_name: str :ivar copy_log_link: Link for copy logs. :vartype copy_log_link: list[str] + :ivar copy_verbose_log_link: Link for copy verbose logs. This will be set only when the + LogCollectionLevel is set to verbose. 
+ :vartype copy_verbose_log_link: list[str] """ _validation = { 'copy_log_details_type': {'required': True}, 'account_name': {'readonly': True}, 'copy_log_link': {'readonly': True}, + 'copy_verbose_log_link': {'readonly': True}, } _attribute_map = { 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, 'account_name': {'key': 'accountName', 'type': 'str'}, 'copy_log_link': {'key': 'copyLogLink', 'type': '[str]'}, + 'copy_verbose_log_link': {'key': 'copyVerboseLogLink', 'type': '[str]'}, } def __init__( @@ -1157,6 +1352,7 @@ def __init__( self.copy_log_details_type = 'DataBoxHeavy' self.account_name = None self.copy_log_link = None + self.copy_verbose_log_link = None class DataBoxHeavyJobDetails(JobDetails): @@ -1166,24 +1362,20 @@ class DataBoxHeavyJobDetails(JobDetails): All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. :type contact_details: ~data_box_management_client.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. + :param shipping_address: Shipping address of the customer. :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. :vartype return_package: ~data_box_management_client.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. 
- :type destination_account_details: - list[~data_box_management_client.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~data_box_management_client.models.JobErrorDetails] + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator @@ -1195,41 +1387,50 @@ class DataBoxHeavyJobDetails(JobDetails): :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :ivar key_encryption_key: Details about which key encryption type is being used. + :vartype key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_terabytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_terabytes: int :ivar copy_progress: Copy progress per account. :vartype copy_progress: list[~data_box_management_client.models.CopyProgress] - :param device_password: Set Device password for unlocking Databox Heavy. + :param device_password: Set Device password for unlocking Databox Heavy. Should not be passed + for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. 
Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. :type device_password: str """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'key_encryption_key': {'readonly': True}, 'copy_progress': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 
'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'copy_progress': {'key': 'copyProgress', 'type': '[CopyProgress]'}, 'device_password': {'key': 'devicePassword', 'type': 'str'}, } @@ -1254,20 +1455,25 @@ class DataBoxHeavyJobSecrets(JobSecrets): :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError :ivar cabinet_pod_secrets: Contains the list of secret objects for a databox heavy job. :vartype cabinet_pod_secrets: list[~data_box_management_client.models.DataBoxHeavySecret] """ _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, 'cabinet_pod_secrets': {'readonly': True}, } _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'cabinet_pod_secrets': {'key': 'cabinetPodSecrets', 'type': '[DataBoxHeavySecret]'}, } @@ -1335,24 +1541,20 @@ class DataBoxJobDetails(JobDetails): All required parameters must be populated in order to send to Azure. 
- :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. :type contact_details: ~data_box_management_client.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. + :param shipping_address: Shipping address of the customer. :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. :vartype return_package: ~data_box_management_client.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: - list[~data_box_management_client.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~data_box_management_client.models.JobErrorDetails] + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". 
:type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator @@ -1364,41 +1566,50 @@ class DataBoxJobDetails(JobDetails): :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :ivar key_encryption_key: Details about which key encryption type is being used. + :vartype key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_terabytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_terabytes: int :ivar copy_progress: Copy progress per storage account. :vartype copy_progress: list[~data_box_management_client.models.CopyProgress] - :param device_password: Set Device password for unlocking Databox. + :param device_password: Set Device password for unlocking Databox. Should not be passed for + TransferType:ExportFromAzure jobs. If this is not passed, the service will generate password + itself. This will not be returned in Get Call. Password Requirements : Password must be + minimum of 12 and maximum of 64 characters. Password must have at least one uppercase alphabet, + one number and one special character. Password cannot have the following characters : IilLoO0 + Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. 
:type device_password: str """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'key_encryption_key': {'readonly': True}, 'copy_progress': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'copy_progress': {'key': 
'copyProgress', 'type': '[CopyProgress]'}, 'device_password': {'key': 'devicePassword', 'type': 'str'}, } @@ -1416,24 +1627,31 @@ def __init__( class DataboxJobSecrets(JobSecrets): """The secrets related to a databox job. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError :param pod_secrets: Contains the list of secret objects for a job. :type pod_secrets: list[~data_box_management_client.models.DataBoxSecret] """ _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, } _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'pod_secrets': {'key': 'podSecrets', 'type': '[DataBoxSecret]'}, } @@ -1454,13 +1672,14 @@ class ScheduleAvailabilityRequest(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. 
- For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str """ _validation = { @@ -1471,6 +1690,7 @@ class ScheduleAvailabilityRequest(msrest.serialization.Model): _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, } _subtype_map = { @@ -1484,6 +1704,7 @@ def __init__( super(ScheduleAvailabilityRequest, self).__init__(**kwargs) self.storage_location = kwargs['storage_location'] self.sku_name = None + self.country = kwargs.get('country', None) class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): @@ -1491,13 +1712,14 @@ class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". 
:type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str """ _validation = { @@ -1508,6 +1730,7 @@ class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, } def __init__( @@ -1566,258 +1789,235 @@ def __init__( self.account_credential_details = None -class DataDestinationDetailsValidationRequest(ValidationInputRequest): - """Request to validate data destination details. +class DataExportDetails(msrest.serialization.Model): + """Details of the data to be used for exporting data from azure. All required parameters must be populated in order to send to Azure. - :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator - :param destination_account_details: Required. Destination account details list. - :type destination_account_details: - list[~data_box_management_client.models.DestinationAccountDetails] - :param location: Required. Location of stamp or geo. - :type location: str + :param transfer_configuration: Required. Configuration for the data transfer. + :type transfer_configuration: ~data_box_management_client.models.TransferConfiguration + :param log_collection_level: Level of the logs to be collected. Possible values include: + "Error", "Verbose". + :type log_collection_level: str or ~data_box_management_client.models.LogCollectionLevel + :param account_details: Required. Account details of the data to be transferred. 
+ :type account_details: ~data_box_management_client.models.DataAccountDetails """ _validation = { - 'validation_type': {'required': True}, - 'destination_account_details': {'required': True}, - 'location': {'required': True}, + 'transfer_configuration': {'required': True}, + 'account_details': {'required': True}, } _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'location': {'key': 'location', 'type': 'str'}, + 'transfer_configuration': {'key': 'transferConfiguration', 'type': 'TransferConfiguration'}, + 'log_collection_level': {'key': 'logCollectionLevel', 'type': 'str'}, + 'account_details': {'key': 'accountDetails', 'type': 'DataAccountDetails'}, } def __init__( self, **kwargs ): - super(DataDestinationDetailsValidationRequest, self).__init__(**kwargs) - self.validation_type = 'ValidateDataDestinationDetails' - self.destination_account_details = kwargs['destination_account_details'] - self.location = kwargs['location'] + super(DataExportDetails, self).__init__(**kwargs) + self.transfer_configuration = kwargs['transfer_configuration'] + self.log_collection_level = kwargs.get('log_collection_level', None) + self.account_details = kwargs['account_details'] -class DataDestinationDetailsValidationResponseProperties(ValidationInputResponse): - """Properties of data destination details validation response. - - Variables are only populated by the server, and will be ignored when sending a request. +class DataImportDetails(msrest.serialization.Model): + """Details of the data to be used for importing data to azure. All required parameters must be populated in order to send to Azure. - :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. 
Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator - :ivar error: Error code and message of validation response. - :vartype error: ~data_box_management_client.models.Error - :ivar status: Data destination details validation status. Possible values include: "Valid", - "Invalid", "Skipped". - :vartype status: str or ~data_box_management_client.models.ValidationStatus + :param account_details: Required. Account details of the data to be transferred. + :type account_details: ~data_box_management_client.models.DataAccountDetails """ _validation = { - 'validation_type': {'required': True}, - 'error': {'readonly': True}, - 'status': {'readonly': True}, + 'account_details': {'required': True}, } _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, - 'status': {'key': 'status', 'type': 'str'}, + 'account_details': {'key': 'accountDetails', 'type': 'DataAccountDetails'}, } def __init__( self, **kwargs ): - super(DataDestinationDetailsValidationResponseProperties, self).__init__(**kwargs) - self.validation_type = 'ValidateDataDestinationDetails' - self.status = None + super(DataImportDetails, self).__init__(**kwargs) + self.account_details = kwargs['account_details'] -class DcAccessSecurityCode(msrest.serialization.Model): - """Dc Access Security code for device. +class DataLocationToServiceLocationMap(msrest.serialization.Model): + """Map of data location to service location. - :param forward_dc_access_code: Dc Access Code for dispatching from DC. - :type forward_dc_access_code: str - :param reverse_dc_access_code: Dc Access code for dropping off at DC. 
- :type reverse_dc_access_code: str + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar data_location: Location of the data. + :vartype data_location: str + :ivar service_location: Location of the service. + :vartype service_location: str """ + _validation = { + 'data_location': {'readonly': True}, + 'service_location': {'readonly': True}, + } + _attribute_map = { - 'forward_dc_access_code': {'key': 'forwardDcAccessCode', 'type': 'str'}, - 'reverse_dc_access_code': {'key': 'reverseDcAccessCode', 'type': 'str'}, + 'data_location': {'key': 'dataLocation', 'type': 'str'}, + 'service_location': {'key': 'serviceLocation', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DcAccessSecurityCode, self).__init__(**kwargs) - self.forward_dc_access_code = kwargs.get('forward_dc_access_code', None) - self.reverse_dc_access_code = kwargs.get('reverse_dc_access_code', None) - + super(DataLocationToServiceLocationMap, self).__init__(**kwargs) + self.data_location = None + self.service_location = None -class DestinationAccountDetails(msrest.serialization.Model): - """Details of the destination storage accounts. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DestinationManagedDiskDetails, DestinationStorageAccountDetails. +class DataTransferDetailsValidationRequest(ValidationInputRequest): + """Request to validate export and import data details. All required parameters must be populated in order to send to Azure. - :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". - :type data_destination_type: str or ~data_box_management_client.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. 
- :type share_password: str + :param validation_type: Required. Identifies the type of validation request.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :param data_export_details: List of DataTransfer details to be used to export data from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] + :param data_import_details: List of DataTransfer details to be used to import data to azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param device_type: Required. Device type. Possible values include: "DataBox", "DataBoxDisk", + "DataBoxHeavy". + :type device_type: str or ~data_box_management_client.models.SkuName + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". 
+ :type transfer_type: str or ~data_box_management_client.models.TransferType """ _validation = { - 'data_destination_type': {'required': True}, + 'validation_type': {'required': True}, + 'device_type': {'required': True}, + 'transfer_type': {'required': True}, } _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - } - - _subtype_map = { - 'data_destination_type': {'ManagedDisk': 'DestinationManagedDiskDetails', 'StorageAccount': 'DestinationStorageAccountDetails'} + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'device_type': {'key': 'deviceType', 'type': 'str'}, + 'transfer_type': {'key': 'transferType', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DestinationAccountDetails, self).__init__(**kwargs) - self.data_destination_type = None - self.account_id = kwargs.get('account_id', None) - self.share_password = kwargs.get('share_password', None) + super(DataTransferDetailsValidationRequest, self).__init__(**kwargs) + self.validation_type = 'ValidateDataTransferDetails' + self.data_export_details = kwargs.get('data_export_details', None) + self.data_import_details = kwargs.get('data_import_details', None) + self.device_type = kwargs['device_type'] + self.transfer_type = kwargs['transfer_type'] -class DestinationManagedDiskDetails(DestinationAccountDetails): - """Details for the destination compute disks. +class DataTransferDetailsValidationResponseProperties(ValidationInputResponse): + """Properties of data transfer details validation response. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. 
- :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". - :type data_destination_type: str or ~data_box_management_client.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. - :type share_password: str - :param resource_group_id: Required. Destination Resource Group Id where the Compute disks - should be created. - :type resource_group_id: str - :param staging_storage_account_id: Required. Arm Id of the storage account that can be used to - copy the vhd for staging. - :type staging_storage_account_id: str + :param validation_type: Required. Identifies the type of validation response.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :ivar error: Error code and message of validation response. + :vartype error: ~data_box_management_client.models.CloudError + :ivar status: Data transfer details validation status. Possible values include: "Valid", + "Invalid", "Skipped". 
+ :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { - 'data_destination_type': {'required': True}, - 'resource_group_id': {'required': True}, - 'staging_storage_account_id': {'required': True}, + 'validation_type': {'required': True}, + 'error': {'readonly': True}, + 'status': {'readonly': True}, } _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - 'resource_group_id': {'key': 'resourceGroupId', 'type': 'str'}, - 'staging_storage_account_id': {'key': 'stagingStorageAccountId', 'type': 'str'}, + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + 'status': {'key': 'status', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DestinationManagedDiskDetails, self).__init__(**kwargs) - self.data_destination_type = 'ManagedDisk' - self.resource_group_id = kwargs['resource_group_id'] - self.staging_storage_account_id = kwargs['staging_storage_account_id'] - + super(DataTransferDetailsValidationResponseProperties, self).__init__(**kwargs) + self.validation_type = 'ValidateDataTransferDetails' + self.status = None -class DestinationStorageAccountDetails(DestinationAccountDetails): - """Details for the destination storage account. - All required parameters must be populated in order to send to Azure. +class DcAccessSecurityCode(msrest.serialization.Model): + """Dc access security code. - :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". - :type data_destination_type: str or ~data_box_management_client.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. 
- :type share_password: str - :param storage_account_id: Required. Destination Storage Account Arm Id. - :type storage_account_id: str + :param reverse_dc_access_code: Reverse Dc access security code. + :type reverse_dc_access_code: str + :param forward_dc_access_code: Forward Dc access security code. + :type forward_dc_access_code: str """ - _validation = { - 'data_destination_type': {'required': True}, - 'storage_account_id': {'required': True}, - } - _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - 'storage_account_id': {'key': 'storageAccountId', 'type': 'str'}, + 'reverse_dc_access_code': {'key': 'reverseDcAccessCode', 'type': 'str'}, + 'forward_dc_access_code': {'key': 'forwardDcAccessCode', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DestinationStorageAccountDetails, self).__init__(**kwargs) - self.data_destination_type = 'StorageAccount' - self.storage_account_id = kwargs['storage_account_id'] + super(DcAccessSecurityCode, self).__init__(**kwargs) + self.reverse_dc_access_code = kwargs.get('reverse_dc_access_code', None) + self.forward_dc_access_code = kwargs.get('forward_dc_access_code', None) -class DestinationToServiceLocationMap(msrest.serialization.Model): - """Map of destination location to service location. +class Details(msrest.serialization.Model): + """Details. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar destination_location: Location of the destination. - :vartype destination_location: str - :ivar service_location: Location of the service. - :vartype service_location: str + :param code: Required. + :type code: str + :param message: Required. 
+ :type message: str """ _validation = { - 'destination_location': {'readonly': True}, - 'service_location': {'readonly': True}, + 'code': {'required': True}, + 'message': {'required': True}, } _attribute_map = { - 'destination_location': {'key': 'destinationLocation', 'type': 'str'}, - 'service_location': {'key': 'serviceLocation', 'type': 'str'}, + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DestinationToServiceLocationMap, self).__init__(**kwargs) - self.destination_location = None - self.service_location = None + super(Details, self).__init__(**kwargs) + self.code = kwargs['code'] + self.message = kwargs['message'] class DiskScheduleAvailabilityRequest(ScheduleAvailabilityRequest): @@ -1825,13 +2025,14 @@ class DiskScheduleAvailabilityRequest(ScheduleAvailabilityRequest): All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str :param expected_data_size_in_terabytes: Required. The expected size of the data, which needs to be transferred in this job, in terabytes. 
:type expected_data_size_in_terabytes: int @@ -1846,6 +2047,7 @@ class DiskScheduleAvailabilityRequest(ScheduleAvailabilityRequest): _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, } @@ -1889,111 +2091,115 @@ def __init__( self.bit_locker_key = None -class Error(msrest.serialization.Model): - """Top level error for the job. +class ErrorDetail(msrest.serialization.Model): + """ErrorDetail. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar code: Error code that can be used to programmatically identify the error. - :vartype code: str - :ivar message: Describes the error in detail and provides debugging information. - :vartype message: str + :param code: Required. + :type code: str + :param message: Required. 
+ :type message: str + :param details: + :type details: list[~data_box_management_client.models.Details] + :param target: + :type target: str """ _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, + 'code': {'required': True}, + 'message': {'required': True}, } _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[Details]'}, + 'target': {'key': 'target', 'type': 'str'}, } def __init__( self, **kwargs ): - super(Error, self).__init__(**kwargs) - self.code = None - self.message = None + super(ErrorDetail, self).__init__(**kwargs) + self.code = kwargs['code'] + self.message = kwargs['message'] + self.details = kwargs.get('details', None) + self.target = kwargs.get('target', None) -class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest): - """Request body to get the availability for scheduling heavy orders. +class FilterFileDetails(msrest.serialization.Model): + """Details of the filter files to be used for data transfer. All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. - :type storage_location: str - :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~data_box_management_client.models.SkuName + :param filter_file_type: Required. Type of the filter file. Possible values include: + "AzureBlob", "AzureFile". + :type filter_file_type: str or ~data_box_management_client.models.FilterFileType + :param filter_file_path: Required. Path of the file that contains the details of all items to + transfer. 
+ :type filter_file_path: str """ _validation = { - 'storage_location': {'required': True}, - 'sku_name': {'required': True}, + 'filter_file_type': {'required': True}, + 'filter_file_path': {'required': True}, } _attribute_map = { - 'storage_location': {'key': 'storageLocation', 'type': 'str'}, - 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'filter_file_type': {'key': 'filterFileType', 'type': 'str'}, + 'filter_file_path': {'key': 'filterFilePath', 'type': 'str'}, } def __init__( self, **kwargs ): - super(HeavyScheduleAvailabilityRequest, self).__init__(**kwargs) - self.sku_name = 'DataBoxHeavy' + super(FilterFileDetails, self).__init__(**kwargs) + self.filter_file_type = kwargs['filter_file_type'] + self.filter_file_path = kwargs['filter_file_path'] -class JobErrorDetails(msrest.serialization.Model): - """Job Error Details for providing the information and recommended action. +class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest): + """Request body to get the availability for scheduling heavy orders. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar error_message: Message for the error. - :vartype error_message: str - :ivar error_code: Code for the error. - :vartype error_code: int - :ivar recommended_action: Recommended action for the error. - :vartype recommended_action: str - :ivar exception_message: Contains the non localized exception message. - :vartype exception_message: str + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. + :type storage_location: str + :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". 
+ :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str """ _validation = { - 'error_message': {'readonly': True}, - 'error_code': {'readonly': True}, - 'recommended_action': {'readonly': True}, - 'exception_message': {'readonly': True}, + 'storage_location': {'required': True}, + 'sku_name': {'required': True}, } _attribute_map = { - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'int'}, - 'recommended_action': {'key': 'recommendedAction', 'type': 'str'}, - 'exception_message': {'key': 'exceptionMessage', 'type': 'str'}, + 'storage_location': {'key': 'storageLocation', 'type': 'str'}, + 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, } def __init__( self, **kwargs ): - super(JobErrorDetails, self).__init__(**kwargs) - self.error_message = None - self.error_code = None - self.recommended_action = None - self.exception_message = None + super(HeavyScheduleAvailabilityRequest, self).__init__(**kwargs) + self.sku_name = 'DataBoxHeavy' class Resource(msrest.serialization.Model): """Model of the Resource. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. :param location: Required. The location of the resource. This will be one of the supported and @@ -2006,17 +2212,28 @@ class Resource(msrest.serialization.Model): :type tags: dict[str, str] :param sku: Required. The sku type. :type sku: ~data_box_management_client.models.Sku + :param type: Identity type. + :type type: str + :ivar principal_id: Service Principal Id backing the Msi. + :vartype principal_id: str + :ivar tenant_id: Home Tenant Id. 
+ :vartype tenant_id: str """ _validation = { 'location': {'required': True}, 'sku': {'required': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, } _attribute_map = { 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'sku', 'type': 'Sku'}, + 'type': {'key': 'identity.type', 'type': 'str'}, + 'principal_id': {'key': 'identity.principalId', 'type': 'str'}, + 'tenant_id': {'key': 'identity.tenantId', 'type': 'str'}, } def __init__( @@ -2027,6 +2244,9 @@ def __init__( self.location = kwargs['location'] self.tags = kwargs.get('tags', None) self.sku = kwargs['sku'] + self.type = kwargs.get('type', None) + self.principal_id = None + self.tenant_id = None class JobResource(Resource): @@ -2046,18 +2266,27 @@ class JobResource(Resource): :type tags: dict[str, str] :param sku: Required. The sku type. :type sku: ~data_box_management_client.models.Sku + :ivar principal_id: Service Principal Id backing the Msi. + :vartype principal_id: str + :ivar tenant_id: Home Tenant Id. + :vartype tenant_id: str :ivar name: Name of the object. :vartype name: str :ivar id: Id of the object. :vartype id: str :ivar type: Type of the object. :vartype type: str + :param transfer_type: Required. Type of the data transfer. Possible values include: + "ImportToAzure", "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType :ivar is_cancellable: Describes whether the job is cancellable or not. :vartype is_cancellable: bool :ivar is_deletable: Describes whether the job is deletable or not. :vartype is_deletable: bool :ivar is_shipping_address_editable: Describes whether the shipping address is editable or not. :vartype is_shipping_address_editable: bool + :ivar is_prepare_to_ship_enabled: Is Prepare To Ship Enabled on this job. + :vartype is_prepare_to_ship_enabled: bool :ivar status: Name of the stage which is in progress. 
Possible values include: "DeviceOrdered", "DevicePrepared", "Dispatched", "Delivered", "PickedUp", "AtAzureDC", "DataCopy", "Completed", "CompletedWithErrors", "Cancelled", "Failed_IssueReportedAtCustomer", @@ -2067,7 +2296,7 @@ class JobResource(Resource): :ivar start_time: Time at which the job was started in UTC ISO 8601 format. :vartype start_time: ~datetime.datetime :ivar error: Top level error for the job. - :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError :param details: Details of a job run. This field will only be sent for expand details filter. :type details: ~data_box_management_client.models.JobDetails :ivar cancellation_reason: Reason for cancellation. @@ -2084,12 +2313,16 @@ class JobResource(Resource): _validation = { 'location': {'required': True}, 'sku': {'required': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, 'name': {'readonly': True}, 'id': {'readonly': True}, 'type': {'readonly': True}, + 'transfer_type': {'required': True}, 'is_cancellable': {'readonly': True}, 'is_deletable': {'readonly': True}, 'is_shipping_address_editable': {'readonly': True}, + 'is_prepare_to_ship_enabled': {'readonly': True}, 'status': {'readonly': True}, 'start_time': {'readonly': True}, 'error': {'readonly': True}, @@ -2101,15 +2334,19 @@ class JobResource(Resource): 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'sku', 'type': 'Sku'}, + 'principal_id': {'key': 'identity.principalId', 'type': 'str'}, + 'tenant_id': {'key': 'identity.tenantId', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'transfer_type': {'key': 'properties.transferType', 'type': 'str'}, 'is_cancellable': {'key': 'properties.isCancellable', 'type': 'bool'}, 'is_deletable': {'key': 'properties.isDeletable', 'type': 'bool'}, 
'is_shipping_address_editable': {'key': 'properties.isShippingAddressEditable', 'type': 'bool'}, + 'is_prepare_to_ship_enabled': {'key': 'properties.isPrepareToShipEnabled', 'type': 'bool'}, 'status': {'key': 'properties.status', 'type': 'str'}, 'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'}, - 'error': {'key': 'properties.error', 'type': 'Error'}, + 'error': {'key': 'properties.error', 'type': 'CloudError'}, 'details': {'key': 'properties.details', 'type': 'JobDetails'}, 'cancellation_reason': {'key': 'properties.cancellationReason', 'type': 'str'}, 'delivery_type': {'key': 'properties.deliveryType', 'type': 'str'}, @@ -2125,9 +2362,11 @@ def __init__( self.name = None self.id = None self.type = None + self.transfer_type = kwargs['transfer_type'] self.is_cancellable = None self.is_deletable = None self.is_shipping_address_editable = None + self.is_prepare_to_ship_enabled = None self.status = None self.start_time = None self.error = None @@ -2164,20 +2403,32 @@ def __init__( class JobResourceUpdateParameter(msrest.serialization.Model): """The JobResourceUpdateParameter. + Variables are only populated by the server, and will be ignored when sending a request. + :param tags: A set of tags. The list of key value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). :type tags: dict[str, str] + :param type: Identity type. + :type type: str + :ivar principal_id: Service Principal Id backing the Msi. + :vartype principal_id: str + :ivar tenant_id: Home Tenant Id. + :vartype tenant_id: str :param details: Details of a job to be updated. :type details: ~data_box_management_client.models.UpdateJobDetails - :param destination_account_details: Destination account details. 
- :type destination_account_details: - list[~data_box_management_client.models.DestinationAccountDetails] """ + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, + 'type': {'key': 'identity.type', 'type': 'str'}, + 'principal_id': {'key': 'identity.principalId', 'type': 'str'}, + 'tenant_id': {'key': 'identity.tenantId', 'type': 'str'}, 'details': {'key': 'properties.details', 'type': 'UpdateJobDetails'}, - 'destination_account_details': {'key': 'properties.destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, } def __init__( @@ -2186,8 +2437,10 @@ def __init__( ): super(JobResourceUpdateParameter, self).__init__(**kwargs) self.tags = kwargs.get('tags', None) + self.type = kwargs.get('type', None) + self.principal_id = None + self.tenant_id = None self.details = kwargs.get('details', None) - self.destination_account_details = kwargs.get('destination_account_details', None) class JobStages(msrest.serialization.Model): @@ -2204,14 +2457,13 @@ class JobStages(msrest.serialization.Model): :ivar display_name: Display name of the job stage. :vartype display_name: str :ivar stage_status: Status of the job stage. Possible values include: "None", "InProgress", - "Succeeded", "Failed", "Cancelled", "Cancelling", "SucceededWithErrors". + "Succeeded", "Failed", "Cancelled", "Cancelling", "SucceededWithErrors", + "WaitingForCustomerAction", "SucceededWithWarnings". :vartype stage_status: str or ~data_box_management_client.models.StageStatus :ivar stage_time: Time for the job stage in UTC ISO 8601 format. :vartype stage_time: ~datetime.datetime :ivar job_stage_details: Job Stage Details. :vartype job_stage_details: object - :ivar error_details: Error details for the stage. 
- :vartype error_details: list[~data_box_management_client.models.JobErrorDetails] """ _validation = { @@ -2220,7 +2472,6 @@ class JobStages(msrest.serialization.Model): 'stage_status': {'readonly': True}, 'stage_time': {'readonly': True}, 'job_stage_details': {'readonly': True}, - 'error_details': {'readonly': True}, } _attribute_map = { @@ -2229,7 +2480,6 @@ class JobStages(msrest.serialization.Model): 'stage_status': {'key': 'stageStatus', 'type': 'str'}, 'stage_time': {'key': 'stageTime', 'type': 'iso-8601'}, 'job_stage_details': {'key': 'jobStageDetails', 'type': 'object'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, } def __init__( @@ -2242,7 +2492,86 @@ def __init__( self.stage_status = None self.stage_time = None self.job_stage_details = None - self.error_details = None + + +class KeyEncryptionKey(msrest.serialization.Model): + """Encryption key containing details about key to encrypt different keys. + + All required parameters must be populated in order to send to Azure. + + :param kek_type: Required. Type of encryption key used for key encryption. Possible values + include: "MicrosoftManaged", "CustomerManaged". + :type kek_type: str or ~data_box_management_client.models.KekType + :param kek_url: Key encryption key. It is required in case of Customer managed KekType. + :type kek_url: str + :param kek_vault_resource_id: Kek vault resource id. It is required in case of Customer managed + KekType. 
+ :type kek_vault_resource_id: str + """ + + _validation = { + 'kek_type': {'required': True}, + } + + _attribute_map = { + 'kek_type': {'key': 'kekType', 'type': 'str'}, + 'kek_url': {'key': 'kekUrl', 'type': 'str'}, + 'kek_vault_resource_id': {'key': 'kekVaultResourceID', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(KeyEncryptionKey, self).__init__(**kwargs) + self.kek_type = kwargs['kek_type'] + self.kek_url = kwargs.get('kek_url', None) + self.kek_vault_resource_id = kwargs.get('kek_vault_resource_id', None) + + +class ManagedDiskDetails(DataAccountDetails): + """Details of the managed disks. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + :param resource_group_id: Required. Resource Group Id of the compute disks. + :type resource_group_id: str + :param staging_storage_account_id: Required. Resource Id of the storage account that can be + used to copy the vhd for staging. 
+ :type staging_storage_account_id: str + """ + + _validation = { + 'data_account_type': {'required': True}, + 'resource_group_id': {'required': True}, + 'staging_storage_account_id': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + 'resource_group_id': {'key': 'resourceGroupId', 'type': 'str'}, + 'staging_storage_account_id': {'key': 'stagingStorageAccountId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagedDiskDetails, self).__init__(**kwargs) + self.data_account_type = 'ManagedDisk' + self.resource_group_id = kwargs['resource_group_id'] + self.staging_storage_account_id = kwargs['staging_storage_account_id'] class NotificationPreference(msrest.serialization.Model): @@ -2290,6 +2619,8 @@ class Operation(msrest.serialization.Model): :vartype properties: object :ivar origin: Origin of the operation. Can be : user|system|user,system. :vartype origin: str + :param is_data_action: Indicates whether the operation is a data action. + :type is_data_action: bool """ _validation = { @@ -2304,6 +2635,7 @@ class Operation(msrest.serialization.Model): 'display': {'key': 'display', 'type': 'OperationDisplay'}, 'properties': {'key': 'properties', 'type': 'object'}, 'origin': {'key': 'origin', 'type': 'str'}, + 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, } def __init__( @@ -2315,6 +2647,7 @@ def __init__( self.display = None self.properties = None self.origin = None + self.is_data_action = kwargs.get('is_data_action', None) class OperationDisplay(msrest.serialization.Model): @@ -2415,7 +2748,7 @@ def __init__( class Preferences(msrest.serialization.Model): """Preferences related to the order. - :param preferred_data_center_region: Preferred Data Center Region. + :param preferred_data_center_region: Preferred data center region. 
:type preferred_data_center_region: list[str] :param transport_preferences: Preferences related to the shipment logistics of the sku. :type transport_preferences: ~data_box_management_client.models.TransportPreferences @@ -2441,11 +2774,11 @@ class PreferencesValidationRequest(ValidationInputRequest): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator - :param preference: Preference requested with respect to transport type and data center. + :param preference: Preference of transport and data center. :type preference: ~data_box_management_client.models.Preferences :param device_type: Required. Device type to be used for the job. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". @@ -2481,12 +2814,12 @@ class PreferencesValidationResponseProperties(ValidationInputResponse): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. 
- :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Validation status of requested data center and transport. Possible values include: "Valid", "Invalid", "Skipped". :vartype status: str or ~data_box_management_client.models.ValidationStatus @@ -2500,7 +2833,7 @@ class PreferencesValidationResponseProperties(ValidationInputResponse): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -2572,7 +2905,7 @@ def __init__( class ScheduleAvailabilityResponse(msrest.serialization.Model): - """Schedule availability response for given sku in a region. + """Schedule availability for given sku in a region. Variables are only populated by the server, and will be ignored when sending a request. @@ -2812,20 +3145,19 @@ def __init__( class SkuAvailabilityValidationRequest(ValidationInputRequest): """Request to validate sku availability. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :param device_type: Required. Device type to be used for the job. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". 
:type device_type: str or ~data_box_management_client.models.SkuName - :ivar transfer_type: Required. Type of the transfer. Default value: "ImportToAzure". - :vartype transfer_type: str + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType :param country: Required. ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. :type country: str @@ -2837,7 +3169,7 @@ class SkuAvailabilityValidationRequest(ValidationInputRequest): _validation = { 'validation_type': {'required': True}, 'device_type': {'required': True}, - 'transfer_type': {'required': True, 'constant': True}, + 'transfer_type': {'required': True}, 'country': {'required': True}, 'location': {'required': True}, } @@ -2850,8 +3182,6 @@ class SkuAvailabilityValidationRequest(ValidationInputRequest): 'location': {'key': 'location', 'type': 'str'}, } - transfer_type = "ImportToAzure" - def __init__( self, **kwargs @@ -2859,6 +3189,7 @@ def __init__( super(SkuAvailabilityValidationRequest, self).__init__(**kwargs) self.validation_type = 'ValidateSkuAvailability' self.device_type = kwargs['device_type'] + self.transfer_type = kwargs['transfer_type'] self.country = kwargs['country'] self.location = kwargs['location'] @@ -2871,12 +3202,12 @@ class SkuAvailabilityValidationResponseProperties(ValidationInputResponse): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". 
+ "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Sku availability validation status. Possible values include: "Valid", "Invalid", "Skipped". :vartype status: str or ~data_box_management_client.models.ValidationStatus @@ -2890,7 +3221,7 @@ class SkuAvailabilityValidationResponseProperties(ValidationInputResponse): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -2942,16 +3273,22 @@ class SkuCost(msrest.serialization.Model): :vartype meter_id: str :ivar meter_type: The type of the meter. :vartype meter_type: str + :ivar multiplier: Multiplier specifies the region specific value to be multiplied with 1$ guid. + Eg: Our new regions will be using 1$ shipping guid with appropriate multiplier specific to + region. + :vartype multiplier: float """ _validation = { 'meter_id': {'readonly': True}, 'meter_type': {'readonly': True}, + 'multiplier': {'readonly': True}, } _attribute_map = { 'meter_id': {'key': 'meterId', 'type': 'str'}, 'meter_type': {'key': 'meterType', 'type': 'str'}, + 'multiplier': {'key': 'multiplier', 'type': 'float'}, } def __init__( @@ -2961,6 +3298,7 @@ def __init__( super(SkuCost, self).__init__(**kwargs) self.meter_id = None self.meter_type = None + self.multiplier = None class SkuInformation(msrest.serialization.Model): @@ -2972,9 +3310,9 @@ class SkuInformation(msrest.serialization.Model): :vartype sku: ~data_box_management_client.models.Sku :ivar enabled: The sku is enabled or not. 
:vartype enabled: bool - :ivar destination_to_service_location_map: The map of destination location to service location. - :vartype destination_to_service_location_map: - list[~data_box_management_client.models.DestinationToServiceLocationMap] + :ivar data_location_to_service_location_map: The map of data location to service location. + :vartype data_location_to_service_location_map: + list[~data_box_management_client.models.DataLocationToServiceLocationMap] :ivar capacity: Capacity of the Sku. :vartype capacity: ~data_box_management_client.models.SkuCapacity :ivar costs: Cost of the Sku. @@ -2993,7 +3331,7 @@ class SkuInformation(msrest.serialization.Model): _validation = { 'sku': {'readonly': True}, 'enabled': {'readonly': True}, - 'destination_to_service_location_map': {'readonly': True}, + 'data_location_to_service_location_map': {'readonly': True}, 'capacity': {'readonly': True}, 'costs': {'readonly': True}, 'api_versions': {'readonly': True}, @@ -3005,7 +3343,7 @@ class SkuInformation(msrest.serialization.Model): _attribute_map = { 'sku': {'key': 'sku', 'type': 'Sku'}, 'enabled': {'key': 'enabled', 'type': 'bool'}, - 'destination_to_service_location_map': {'key': 'properties.destinationToServiceLocationMap', 'type': '[DestinationToServiceLocationMap]'}, + 'data_location_to_service_location_map': {'key': 'properties.dataLocationToServiceLocationMap', 'type': '[DataLocationToServiceLocationMap]'}, 'capacity': {'key': 'properties.capacity', 'type': 'SkuCapacity'}, 'costs': {'key': 'properties.costs', 'type': '[SkuCost]'}, 'api_versions': {'key': 'properties.apiVersions', 'type': '[str]'}, @@ -3021,7 +3359,7 @@ def __init__( super(SkuInformation, self).__init__(**kwargs) self.sku = None self.enabled = None - self.destination_to_service_location_map = None + self.data_location_to_service_location_map = None self.capacity = None self.costs = None self.api_versions = None @@ -3030,15 +3368,54 @@ def __init__( self.required_feature = None +class 
StorageAccountDetails(DataAccountDetails): + """Details for the storage account. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + :param storage_account_id: Required. Storage Account Resource Id. + :type storage_account_id: str + """ + + _validation = { + 'data_account_type': {'required': True}, + 'storage_account_id': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + 'storage_account_id': {'key': 'storageAccountId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountDetails, self).__init__(**kwargs) + self.data_account_type = 'StorageAccount' + self.storage_account_id = kwargs['storage_account_id'] + + class SubscriptionIsAllowedToCreateJobValidationRequest(ValidationInputRequest): """Request to validate subscription permission to create jobs. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. 
Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator """ @@ -3066,12 +3443,12 @@ class SubscriptionIsAllowedToCreateJobValidationResponseProperties(ValidationInp All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Validation status of subscription permission to create job. Possible values include: "Valid", "Invalid", "Skipped". :vartype status: str or ~data_box_management_client.models.ValidationStatus @@ -3085,7 +3462,7 @@ class SubscriptionIsAllowedToCreateJobValidationResponseProperties(ValidationInp _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -3098,6 +3475,155 @@ def __init__( self.status = None +class TransferAllDetails(msrest.serialization.Model): + """Details to transfer all data. 
+ + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Type of the account of data. Possible values include: + "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param transfer_all_blobs: To indicate if all Azure blobs have to be transferred. + :type transfer_all_blobs: bool + :param transfer_all_files: To indicate if all Azure Files have to be transferred. + :type transfer_all_files: bool + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'transfer_all_blobs': {'key': 'transferAllBlobs', 'type': 'bool'}, + 'transfer_all_files': {'key': 'transferAllFiles', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(TransferAllDetails, self).__init__(**kwargs) + self.data_account_type = kwargs['data_account_type'] + self.transfer_all_blobs = kwargs.get('transfer_all_blobs', None) + self.transfer_all_files = kwargs.get('transfer_all_files', None) + + +class TransferConfiguration(msrest.serialization.Model): + """Configuration for defining the transfer of data. + + All required parameters must be populated in order to send to Azure. + + :param transfer_configuration_type: Required. Type of the configuration for transfer. Possible + values include: "TransferAll", "TransferUsingFilter". + :type transfer_configuration_type: str or + ~data_box_management_client.models.TransferConfigurationType + :param transfer_filter_details: Map of filter type and the details to filter. This field is + required only if the TransferConfigurationType is given as TransferUsingFilter. + :type transfer_filter_details: + ~data_box_management_client.models.TransferConfigurationTransferFilterDetails + :param transfer_all_details: Map of filter type and the details to transfer all data. 
This + field is required only if the TransferConfigurationType is given as TransferAll. + :type transfer_all_details: + ~data_box_management_client.models.TransferConfigurationTransferAllDetails + """ + + _validation = { + 'transfer_configuration_type': {'required': True}, + } + + _attribute_map = { + 'transfer_configuration_type': {'key': 'transferConfigurationType', 'type': 'str'}, + 'transfer_filter_details': {'key': 'transferFilterDetails', 'type': 'TransferConfigurationTransferFilterDetails'}, + 'transfer_all_details': {'key': 'transferAllDetails', 'type': 'TransferConfigurationTransferAllDetails'}, + } + + def __init__( + self, + **kwargs + ): + super(TransferConfiguration, self).__init__(**kwargs) + self.transfer_configuration_type = kwargs['transfer_configuration_type'] + self.transfer_filter_details = kwargs.get('transfer_filter_details', None) + self.transfer_all_details = kwargs.get('transfer_all_details', None) + + +class TransferConfigurationTransferAllDetails(msrest.serialization.Model): + """Map of filter type and the details to transfer all data. This field is required only if the TransferConfigurationType is given as TransferAll. + + :param include: Details to transfer all data. + :type include: ~data_box_management_client.models.TransferAllDetails + """ + + _attribute_map = { + 'include': {'key': 'include', 'type': 'TransferAllDetails'}, + } + + def __init__( + self, + **kwargs + ): + super(TransferConfigurationTransferAllDetails, self).__init__(**kwargs) + self.include = kwargs.get('include', None) + + +class TransferConfigurationTransferFilterDetails(msrest.serialization.Model): + """Map of filter type and the details to filter. This field is required only if the TransferConfigurationType is given as TransferUsingFilter. + + :param include: Details of the filtering the transfer of data. 
+ :type include: ~data_box_management_client.models.TransferFilterDetails + """ + + _attribute_map = { + 'include': {'key': 'include', 'type': 'TransferFilterDetails'}, + } + + def __init__( + self, + **kwargs + ): + super(TransferConfigurationTransferFilterDetails, self).__init__(**kwargs) + self.include = kwargs.get('include', None) + + +class TransferFilterDetails(msrest.serialization.Model): + """Details of the filtering the transfer of data. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Type of the account of data. Possible values include: + "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param blob_filter_details: Filter details to transfer blobs. + :type blob_filter_details: ~data_box_management_client.models.BlobFilterDetails + :param azure_file_filter_details: Filter details to transfer Azure files. + :type azure_file_filter_details: ~data_box_management_client.models.AzureFileFilterDetails + :param filter_file_details: Details of the filter files to be used for data transfer. 
+ :type filter_file_details: list[~data_box_management_client.models.FilterFileDetails] + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'blob_filter_details': {'key': 'blobFilterDetails', 'type': 'BlobFilterDetails'}, + 'azure_file_filter_details': {'key': 'azureFileFilterDetails', 'type': 'AzureFileFilterDetails'}, + 'filter_file_details': {'key': 'filterFileDetails', 'type': '[FilterFileDetails]'}, + } + + def __init__( + self, + **kwargs + ): + super(TransferFilterDetails, self).__init__(**kwargs) + self.data_account_type = kwargs['data_account_type'] + self.blob_filter_details = kwargs.get('blob_filter_details', None) + self.azure_file_filter_details = kwargs.get('azure_file_filter_details', None) + self.filter_file_details = kwargs.get('filter_file_details', None) + + class TransportAvailabilityDetails(msrest.serialization.Model): """Transport options availability details for given region. @@ -3236,11 +3762,14 @@ class UpdateJobDetails(msrest.serialization.Model): :type contact_details: ~data_box_management_client.models.ContactDetails :param shipping_address: Shipping address of the customer. :type shipping_address: ~data_box_management_client.models.ShippingAddress + :param key_encryption_key: Key encryption key for the job. 
+ :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey """ _attribute_map = { 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, } def __init__( @@ -3250,6 +3779,7 @@ def __init__( super(UpdateJobDetails, self).__init__(**kwargs) self.contact_details = kwargs.get('contact_details', None) self.shipping_address = kwargs.get('shipping_address', None) + self.key_encryption_key = kwargs.get('key_encryption_key', None) class ValidateAddress(ValidationInputRequest): @@ -3258,9 +3788,9 @@ class ValidateAddress(ValidationInputRequest): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :param shipping_address: Required. Shipping address of the customer. 
:type shipping_address: ~data_box_management_client.models.ShippingAddress diff --git a/src/databox/azext_databox/vendored_sdks/databox/models/_models_py3.py b/src/databox/azext_databox/vendored_sdks/databox/models/_models_py3.py index 6a7c429222d..7e4712c37a6 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/models/_models_py3.py +++ b/src/databox/azext_databox/vendored_sdks/databox/models/_models_py3.py @@ -9,6 +9,7 @@ import datetime from typing import Dict, List, Optional, Union +from azure.core.exceptions import HttpResponseError import msrest.serialization from ._data_box_management_client_enums import * @@ -21,9 +22,9 @@ class AccountCredentialDetails(msrest.serialization.Model): :ivar account_name: Name of the account. :vartype account_name: str - :ivar data_destination_type: Data Destination Type. Possible values include: "StorageAccount", + :ivar data_account_type: Type of the account. Possible values include: "StorageAccount", "ManagedDisk". - :vartype data_destination_type: str or ~data_box_management_client.models.DataDestinationType + :vartype data_account_type: str or ~data_box_management_client.models.DataAccountType :ivar account_connection_string: Connection string of the account endpoint to use the account as a storage endpoint on the device. 
:vartype account_connection_string: str @@ -34,14 +35,14 @@ class AccountCredentialDetails(msrest.serialization.Model): _validation = { 'account_name': {'readonly': True}, - 'data_destination_type': {'readonly': True}, + 'data_account_type': {'readonly': True}, 'account_connection_string': {'readonly': True}, 'share_credential_details': {'readonly': True}, } _attribute_map = { 'account_name': {'key': 'accountName', 'type': 'str'}, - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, 'account_connection_string': {'key': 'accountConnectionString', 'type': 'str'}, 'share_credential_details': {'key': 'shareCredentialDetails', 'type': '[ShareCredentialDetails]'}, } @@ -52,23 +53,49 @@ def __init__( ): super(AccountCredentialDetails, self).__init__(**kwargs) self.account_name = None - self.data_destination_type = None + self.data_account_type = None self.account_connection_string = None self.share_credential_details = None +class AdditionalErrorInfo(msrest.serialization.Model): + """Additional error info. + + :param type: Additional error type. + :type type: str + :param info: Additional error info. + :type info: object + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'info': {'key': 'info', 'type': 'object'}, + } + + def __init__( + self, + *, + type: Optional[str] = None, + info: Optional[object] = None, + **kwargs + ): + super(AdditionalErrorInfo, self).__init__(**kwargs) + self.type = type + self.info = info + + class AddressValidationOutput(msrest.serialization.Model): """Output of the address validation api. Variables are only populated by the server, and will be ignored when sending a request. :param validation_type: Identifies the type of validation response.Constant filled by server. 
- Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", + "ValidatePreferences", "ValidateCreateOrderLimit", "ValidateSkuAvailability", + "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar validation_status: The address validation status. Possible values include: "Valid", "Invalid", "Ambiguous". :vartype validation_status: str or ~data_box_management_client.models.AddressValidationStatus @@ -84,7 +111,7 @@ class AddressValidationOutput(msrest.serialization.Model): _attribute_map = { 'validation_type': {'key': 'properties.validationType', 'type': 'str'}, - 'error': {'key': 'properties.error', 'type': 'Error'}, + 'error': {'key': 'properties.error', 'type': 'CloudError'}, 'validation_status': {'key': 'properties.validationStatus', 'type': 'str'}, 'alternate_addresses': {'key': 'properties.alternateAddresses', 'type': '[ShippingAddress]'}, } @@ -104,19 +131,19 @@ class ValidationInputResponse(msrest.serialization.Model): """Minimum properties that should be present in each individual validation response. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AddressValidationProperties, CreateOrderLimitForSubscriptionValidationResponseProperties, DataDestinationDetailsValidationResponseProperties, PreferencesValidationResponseProperties, SkuAvailabilityValidationResponseProperties, SubscriptionIsAllowedToCreateJobValidationResponseProperties. 
+ sub-classes are: AddressValidationProperties, CreateOrderLimitForSubscriptionValidationResponseProperties, DataTransferDetailsValidationResponseProperties, PreferencesValidationResponseProperties, SkuAvailabilityValidationResponseProperties, SubscriptionIsAllowedToCreateJobValidationResponseProperties. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. 
- :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError """ _validation = { @@ -126,11 +153,11 @@ class ValidationInputResponse(msrest.serialization.Model): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, } _subtype_map = { - 'validation_type': {'ValidateAddress': 'AddressValidationProperties', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationResponseProperties', 'ValidateDataDestinationDetails': 'DataDestinationDetailsValidationResponseProperties', 'ValidatePreferences': 'PreferencesValidationResponseProperties', 'ValidateSkuAvailability': 'SkuAvailabilityValidationResponseProperties', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationResponseProperties'} + 'validation_type': {'ValidateAddress': 'AddressValidationProperties', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationResponseProperties', 'ValidateDataTransferDetails': 'DataTransferDetailsValidationResponseProperties', 'ValidatePreferences': 'PreferencesValidationResponseProperties', 'ValidateSkuAvailability': 'SkuAvailabilityValidationResponseProperties', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationResponseProperties'} } def __init__( @@ -150,12 +177,12 @@ class AddressValidationProperties(ValidationInputResponse): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". 
+ "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar validation_status: The address validation status. Possible values include: "Valid", "Invalid", "Ambiguous". :vartype validation_status: str or ~data_box_management_client.models.AddressValidationStatus @@ -172,7 +199,7 @@ class AddressValidationProperties(ValidationInputResponse): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'validation_status': {'key': 'validationStatus', 'type': 'str'}, 'alternate_addresses': {'key': 'alternateAddresses', 'type': '[ShippingAddress]'}, } @@ -187,6 +214,33 @@ def __init__( self.alternate_addresses = None +class ApiError(msrest.serialization.Model): + """ApiError. + + All required parameters must be populated in order to send to Azure. + + :param error: Required. + :type error: ~data_box_management_client.models.ErrorDetail + """ + + _validation = { + 'error': {'required': True}, + } + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorDetail'}, + } + + def __init__( + self, + *, + error: "ErrorDetail", + **kwargs + ): + super(ApiError, self).__init__(**kwargs) + self.error = error + + class ApplianceNetworkConfiguration(msrest.serialization.Model): """The Network Adapter configuration of a DataBox. @@ -255,12 +309,11 @@ def __init__( class AvailableSkuRequest(msrest.serialization.Model): """The filters for showing the available skus. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar transfer_type: Required. Type of the transfer. 
Default value: "ImportToAzure". - :vartype transfer_type: str + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType :param country: Required. ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. :type country: str @@ -272,7 +325,7 @@ class AvailableSkuRequest(msrest.serialization.Model): """ _validation = { - 'transfer_type': {'required': True, 'constant': True}, + 'transfer_type': {'required': True}, 'country': {'required': True}, 'location': {'required': True}, } @@ -284,17 +337,17 @@ class AvailableSkuRequest(msrest.serialization.Model): 'sku_names': {'key': 'skuNames', 'type': '[str]'}, } - transfer_type = "ImportToAzure" - def __init__( self, *, + transfer_type: Union[str, "TransferType"], country: str, location: str, sku_names: Optional[List[Union[str, "SkuName"]]] = None, **kwargs ): super(AvailableSkuRequest, self).__init__(**kwargs) + self.transfer_type = transfer_type self.country = country self.location = location self.sku_names = sku_names @@ -331,6 +384,68 @@ def __init__( self.next_link = next_link +class AzureFileFilterDetails(msrest.serialization.Model): + """Filter details to transfer Azure files. + + :param file_prefix_list: Prefix list of the Azure files to be transferred. + :type file_prefix_list: list[str] + :param file_path_list: List of full path of the files to be transferred. + :type file_path_list: list[str] + :param file_share_list: List of file shares to be transferred. 
+ :type file_share_list: list[str] + """ + + _attribute_map = { + 'file_prefix_list': {'key': 'filePrefixList', 'type': '[str]'}, + 'file_path_list': {'key': 'filePathList', 'type': '[str]'}, + 'file_share_list': {'key': 'fileShareList', 'type': '[str]'}, + } + + def __init__( + self, + *, + file_prefix_list: Optional[List[str]] = None, + file_path_list: Optional[List[str]] = None, + file_share_list: Optional[List[str]] = None, + **kwargs + ): + super(AzureFileFilterDetails, self).__init__(**kwargs) + self.file_prefix_list = file_prefix_list + self.file_path_list = file_path_list + self.file_share_list = file_share_list + + +class BlobFilterDetails(msrest.serialization.Model): + """Filter details to transfer Azure Blobs. + + :param blob_prefix_list: Prefix list of the Azure blobs to be transferred. + :type blob_prefix_list: list[str] + :param blob_path_list: List of full path of the blobs to be transferred. + :type blob_path_list: list[str] + :param container_list: List of blob containers to be transferred. + :type container_list: list[str] + """ + + _attribute_map = { + 'blob_prefix_list': {'key': 'blobPrefixList', 'type': '[str]'}, + 'blob_path_list': {'key': 'blobPathList', 'type': '[str]'}, + 'container_list': {'key': 'containerList', 'type': '[str]'}, + } + + def __init__( + self, + *, + blob_prefix_list: Optional[List[str]] = None, + blob_path_list: Optional[List[str]] = None, + container_list: Optional[List[str]] = None, + **kwargs + ): + super(BlobFilterDetails, self).__init__(**kwargs) + self.blob_prefix_list = blob_prefix_list + self.blob_path_list = blob_path_list + self.container_list = container_list + + class CancellationReason(msrest.serialization.Model): """Reason for cancellation. @@ -359,23 +474,25 @@ def __init__( class CloudError(msrest.serialization.Model): - """The error information object. + """Cloud error. Variables are only populated by the server, and will be ignored when sending a request. - :ivar code: Error code string. 
- :vartype code: str - :ivar message: Descriptive error information. - :vartype message: str - :param target: Error target. + :param code: Cloud error code. + :type code: str + :param message: Cloud error message. + :type message: str + :param target: Cloud error target. :type target: str - :param details: More detailed error information. - :type details: list[~data_box_management_client.models.CloudError] + :ivar details: Cloud error details. + :vartype details: list[~data_box_management_client.models.CloudError] + :ivar additional_info: Cloud error additional info. + :vartype additional_info: list[~data_box_management_client.models.AdditionalErrorInfo] """ _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, + 'details': {'readonly': True}, + 'additional_info': {'readonly': True}, } _attribute_map = { @@ -383,20 +500,23 @@ class CloudError(msrest.serialization.Model): 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, 'details': {'key': 'details', 'type': '[CloudError]'}, + 'additional_info': {'key': 'additionalInfo', 'type': '[AdditionalErrorInfo]'}, } def __init__( self, *, + code: Optional[str] = None, + message: Optional[str] = None, target: Optional[str] = None, - details: Optional[List["CloudError"]] = None, **kwargs ): super(CloudError, self).__init__(**kwargs) - self.code = None - self.message = None + self.code = code + self.message = message self.target = target - self.details = details + self.details = None + self.additional_info = None class ContactDetails(msrest.serialization.Model): @@ -491,20 +611,24 @@ class CopyProgress(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar storage_account_name: Name of the storage account where the data needs to be uploaded. + :ivar storage_account_name: Name of the storage account. This will be empty for data account + types other than storage account. 
:vartype storage_account_name: str - :ivar data_destination_type: Data Destination Type. Possible values include: "StorageAccount", + :ivar transfer_type: Transfer type of data. Possible values include: "ImportToAzure", + "ExportFromAzure". + :vartype transfer_type: str or ~data_box_management_client.models.TransferType + :ivar data_account_type: Data Account Type. Possible values include: "StorageAccount", "ManagedDisk". - :vartype data_destination_type: str or ~data_box_management_client.models.DataDestinationType + :vartype data_account_type: str or ~data_box_management_client.models.DataAccountType :ivar account_id: Id of the account where the data needs to be uploaded. :vartype account_id: str - :ivar bytes_sent_to_cloud: Amount of data uploaded by the job as of now. - :vartype bytes_sent_to_cloud: long + :ivar bytes_processed: To indicate bytes transferred. + :vartype bytes_processed: long :ivar total_bytes_to_process: Total amount of data to be processed by the job. :vartype total_bytes_to_process: long - :ivar files_processed: Number of files processed by the job as of now. + :ivar files_processed: Number of files processed. :vartype files_processed: long - :ivar total_files_to_process: Total number of files to be processed by the job. + :ivar total_files_to_process: Total files to process. :vartype total_files_to_process: long :ivar invalid_files_processed: Number of files not adhering to azure naming conventions which were processed by automatic renaming. @@ -517,13 +641,21 @@ class CopyProgress(msrest.serialization.Model): :vartype renamed_container_count: long :ivar files_errored_out: Number of files which could not be copied. :vartype files_errored_out: long + :ivar directories_errored_out: To indicate directories errored out in the job. + :vartype directories_errored_out: long + :ivar invalid_directories_processed: To indicate directories renamed. 
+ :vartype invalid_directories_processed: long + :ivar is_enumeration_in_progress: To indicate if enumeration of data is in progress. + Until this is true, the TotalBytesToProcess may not be valid. + :vartype is_enumeration_in_progress: bool """ _validation = { 'storage_account_name': {'readonly': True}, - 'data_destination_type': {'readonly': True}, + 'transfer_type': {'readonly': True}, + 'data_account_type': {'readonly': True}, 'account_id': {'readonly': True}, - 'bytes_sent_to_cloud': {'readonly': True}, + 'bytes_processed': {'readonly': True}, 'total_bytes_to_process': {'readonly': True}, 'files_processed': {'readonly': True}, 'total_files_to_process': {'readonly': True}, @@ -531,13 +663,17 @@ class CopyProgress(msrest.serialization.Model): 'invalid_file_bytes_uploaded': {'readonly': True}, 'renamed_container_count': {'readonly': True}, 'files_errored_out': {'readonly': True}, + 'directories_errored_out': {'readonly': True}, + 'invalid_directories_processed': {'readonly': True}, + 'is_enumeration_in_progress': {'readonly': True}, } _attribute_map = { 'storage_account_name': {'key': 'storageAccountName', 'type': 'str'}, - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, + 'transfer_type': {'key': 'transferType', 'type': 'str'}, + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, 'account_id': {'key': 'accountId', 'type': 'str'}, - 'bytes_sent_to_cloud': {'key': 'bytesSentToCloud', 'type': 'long'}, + 'bytes_processed': {'key': 'bytesProcessed', 'type': 'long'}, 'total_bytes_to_process': {'key': 'totalBytesToProcess', 'type': 'long'}, 'files_processed': {'key': 'filesProcessed', 'type': 'long'}, 'total_files_to_process': {'key': 'totalFilesToProcess', 'type': 'long'}, @@ -545,6 +681,9 @@ class CopyProgress(msrest.serialization.Model): 'invalid_file_bytes_uploaded': {'key': 'invalidFileBytesUploaded', 'type': 'long'}, 'renamed_container_count': {'key': 'renamedContainerCount', 'type': 'long'}, 'files_errored_out': {'key': 
'filesErroredOut', 'type': 'long'}, + 'directories_errored_out': {'key': 'directoriesErroredOut', 'type': 'long'}, + 'invalid_directories_processed': {'key': 'invalidDirectoriesProcessed', 'type': 'long'}, + 'is_enumeration_in_progress': {'key': 'isEnumerationInProgress', 'type': 'bool'}, } def __init__( @@ -553,9 +692,10 @@ def __init__( ): super(CopyProgress, self).__init__(**kwargs) self.storage_account_name = None - self.data_destination_type = None + self.transfer_type = None + self.data_account_type = None self.account_id = None - self.bytes_sent_to_cloud = None + self.bytes_processed = None self.total_bytes_to_process = None self.files_processed = None self.total_files_to_process = None @@ -563,33 +703,36 @@ def __init__( self.invalid_file_bytes_uploaded = None self.renamed_container_count = None self.files_errored_out = None + self.directories_errored_out = None + self.invalid_directories_processed = None + self.is_enumeration_in_progress = None class ValidationRequest(msrest.serialization.Model): - """Input request for all pre job creation validation. + """Minimum request requirement of any validation category. You probably want to use the sub-classes and not this class directly. Known sub-classes are: CreateJobValidations. All required parameters must be populated in order to send to Azure. + :param validation_category: Required. Identify the nature of validation.Constant filled by + server. + :type validation_category: str :param individual_request_details: Required. List of request details contain validationType and its request as key and value respectively. :type individual_request_details: list[~data_box_management_client.models.ValidationInputRequest] - :param validation_category: Required. Identify the nature of validation.Constant filled by - server. 
- :type validation_category: str """ _validation = { - 'individual_request_details': {'required': True}, 'validation_category': {'required': True}, + 'individual_request_details': {'required': True}, } _attribute_map = { - 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, 'validation_category': {'key': 'validationCategory', 'type': 'str'}, + 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, } _subtype_map = { @@ -603,8 +746,8 @@ def __init__( **kwargs ): super(ValidationRequest, self).__init__(**kwargs) - self.individual_request_details = individual_request_details self.validation_category: Optional[str] = None + self.individual_request_details = individual_request_details class CreateJobValidations(ValidationRequest): @@ -612,23 +755,23 @@ class CreateJobValidations(ValidationRequest): All required parameters must be populated in order to send to Azure. + :param validation_category: Required. Identify the nature of validation.Constant filled by + server. + :type validation_category: str :param individual_request_details: Required. List of request details contain validationType and its request as key and value respectively. :type individual_request_details: list[~data_box_management_client.models.ValidationInputRequest] - :param validation_category: Required. Identify the nature of validation.Constant filled by - server. 
- :type validation_category: str """ _validation = { - 'individual_request_details': {'required': True}, 'validation_category': {'required': True}, + 'individual_request_details': {'required': True}, } _attribute_map = { - 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, 'validation_category': {'key': 'validationCategory', 'type': 'str'}, + 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, } def __init__( @@ -645,14 +788,14 @@ class ValidationInputRequest(msrest.serialization.Model): """Minimum fields that must be present in any type of validation request. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ValidateAddress, CreateOrderLimitForSubscriptionValidationRequest, DataDestinationDetailsValidationRequest, PreferencesValidationRequest, SkuAvailabilityValidationRequest, SubscriptionIsAllowedToCreateJobValidationRequest. + sub-classes are: ValidateAddress, CreateOrderLimitForSubscriptionValidationRequest, DataTransferDetailsValidationRequest, PreferencesValidationRequest, SkuAvailabilityValidationRequest, SubscriptionIsAllowedToCreateJobValidationRequest. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". 
:type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator """ @@ -665,7 +808,7 @@ class ValidationInputRequest(msrest.serialization.Model): } _subtype_map = { - 'validation_type': {'ValidateAddress': 'ValidateAddress', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationRequest', 'ValidateDataDestinationDetails': 'DataDestinationDetailsValidationRequest', 'ValidatePreferences': 'PreferencesValidationRequest', 'ValidateSkuAvailability': 'SkuAvailabilityValidationRequest', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationRequest'} + 'validation_type': {'ValidateAddress': 'ValidateAddress', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationRequest', 'ValidateDataTransferDetails': 'DataTransferDetailsValidationRequest', 'ValidatePreferences': 'PreferencesValidationRequest', 'ValidateSkuAvailability': 'SkuAvailabilityValidationRequest', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationRequest'} } def __init__( @@ -682,9 +825,9 @@ class CreateOrderLimitForSubscriptionValidationRequest(ValidationInputRequest): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :param device_type: Required. Device type to be used for the job. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". 
@@ -720,12 +863,12 @@ class CreateOrderLimitForSubscriptionValidationResponseProperties(ValidationInpu All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Create order limit validation status. Possible values include: "Valid", "Invalid", "Skipped". :vartype status: str or ~data_box_management_client.models.ValidationStatus @@ -739,7 +882,7 @@ class CreateOrderLimitForSubscriptionValidationResponseProperties(ValidationInpu _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -752,6 +895,50 @@ def __init__( self.status = None +class DataAccountDetails(msrest.serialization.Model): + """Account details of the data to be transferred. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ManagedDiskDetails, StorageAccountDetails. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". 
+ :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + } + + _subtype_map = { + 'data_account_type': {'ManagedDisk': 'ManagedDiskDetails', 'StorageAccount': 'StorageAccountDetails'} + } + + def __init__( + self, + *, + share_password: Optional[str] = None, + **kwargs + ): + super(DataAccountDetails, self).__init__(**kwargs) + self.data_account_type: Optional[str] = None + self.share_password = share_password + + class DataBoxAccountCopyLogDetails(CopyLogDetails): """Copy log details for a storage account of a DataBox job. @@ -762,22 +949,27 @@ class DataBoxAccountCopyLogDetails(CopyLogDetails): :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator - :ivar account_name: Destination account name. + :ivar account_name: Account name. :vartype account_name: str :ivar copy_log_link: Link for copy logs. :vartype copy_log_link: str + :ivar copy_verbose_log_link: Link for copy verbose logs. 
This will be set only when + LogCollectionLevel is set to Verbose. + :vartype copy_verbose_log_link: str """ _validation = { 'copy_log_details_type': {'required': True}, 'account_name': {'readonly': True}, 'copy_log_link': {'readonly': True}, + 'copy_verbose_log_link': {'readonly': True}, } _attribute_map = { 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, 'account_name': {'key': 'accountName', 'type': 'str'}, 'copy_log_link': {'key': 'copyLogLink', 'type': 'str'}, + 'copy_verbose_log_link': {'key': 'copyVerboseLogLink', 'type': 'str'}, } def __init__( @@ -788,6 +980,7 @@ def __init__( self.copy_log_details_type: str = 'DataBox' self.account_name = None self.copy_log_link = None + self.copy_verbose_log_link = None class DataBoxDiskCopyLogDetails(CopyLogDetails): @@ -885,24 +1078,20 @@ class JobDetails(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. :type contact_details: ~data_box_management_client.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. + :param shipping_address: Shipping address of the customer. :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. :vartype return_package: ~data_box_management_client.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. 
- :type destination_account_details: - list[~data_box_management_client.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~data_box_management_client.models.JobErrorDetails] + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator @@ -914,36 +1103,40 @@ class JobDetails(msrest.serialization.Model): :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :ivar key_encryption_key: Details about which key encryption type is being used. + :vartype key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_terabytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. 
+ :type expected_data_size_in_terabytes: int """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'key_encryption_key': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, } _subtype_map = { @@ -954,26 +1147,28 @@ def 
__init__( self, *, contact_details: "ContactDetails", - shipping_address: "ShippingAddress", - destination_account_details: List["DestinationAccountDetails"], - expected_data_size_in_terabytes: Optional[int] = None, + shipping_address: Optional["ShippingAddress"] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, + data_export_details: Optional[List["DataExportDetails"]] = None, preferences: Optional["Preferences"] = None, + expected_data_size_in_terabytes: Optional[int] = None, **kwargs ): super(JobDetails, self).__init__(**kwargs) - self.expected_data_size_in_terabytes = expected_data_size_in_terabytes self.job_stages = None self.contact_details = contact_details self.shipping_address = shipping_address self.delivery_package = None self.return_package = None - self.destination_account_details = destination_account_details - self.error_details = None + self.data_import_details = data_import_details + self.data_export_details = data_export_details self.job_details_type: Optional[str] = None self.preferences = preferences self.copy_log_details = None self.reverse_shipment_label_sas_key = None self.chain_of_custody_sas_key = None + self.key_encryption_key = None + self.expected_data_size_in_terabytes = expected_data_size_in_terabytes class DataBoxDiskJobDetails(JobDetails): @@ -983,24 +1178,20 @@ class DataBoxDiskJobDetails(JobDetails): All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. :type contact_details: ~data_box_management_client.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. 
+ :param shipping_address: Shipping address of the customer. :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. :vartype return_package: ~data_box_management_client.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: - list[~data_box_management_client.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~data_box_management_client.models.JobErrorDetails] + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator @@ -1012,6 +1203,11 @@ class DataBoxDiskJobDetails(JobDetails): :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :ivar key_encryption_key: Details about which key encryption type is being used. + :vartype key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_terabytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_terabytes: int :param preferred_disks: User preference on what size disks are needed for the job. 
The map is from the disk size in TB to the count. Eg. {2,5} means 5 disks of 2 TB size. Key is string but will be checked against an int. @@ -1028,33 +1224,32 @@ class DataBoxDiskJobDetails(JobDetails): _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'key_encryption_key': {'readonly': True}, 'copy_progress': {'readonly': True}, 'disks_and_size_details': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 
'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'preferred_disks': {'key': 'preferredDisks', 'type': '{int}'}, 'copy_progress': {'key': 'copyProgress', 'type': '[DataBoxDiskCopyProgress]'}, 'disks_and_size_details': {'key': 'disksAndSizeDetails', 'type': '{int}'}, @@ -1065,15 +1260,16 @@ def __init__( self, *, contact_details: "ContactDetails", - shipping_address: "ShippingAddress", - destination_account_details: List["DestinationAccountDetails"], - expected_data_size_in_terabytes: Optional[int] = None, + shipping_address: Optional["ShippingAddress"] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, + data_export_details: Optional[List["DataExportDetails"]] = None, preferences: Optional["Preferences"] = None, + expected_data_size_in_terabytes: Optional[int] = None, preferred_disks: Optional[Dict[str, int]] = None, passkey: Optional[str] = None, **kwargs ): - super(DataBoxDiskJobDetails, self).__init__(expected_data_size_in_terabytes=expected_data_size_in_terabytes, contact_details=contact_details, shipping_address=shipping_address, destination_account_details=destination_account_details, preferences=preferences, **kwargs) + super(DataBoxDiskJobDetails, self).__init__(contact_details=contact_details, shipping_address=shipping_address, data_import_details=data_import_details, data_export_details=data_export_details, preferences=preferences, expected_data_size_in_terabytes=expected_data_size_in_terabytes, **kwargs) self.job_details_type: str = 'DataBoxDisk' self.preferred_disks = preferred_disks self.copy_progress = None @@ -1087,22 +1283,29 @@ class JobSecrets(msrest.serialization.Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: DataboxJobSecrets, DataBoxDiskJobSecrets, DataBoxHeavyJobSecrets. 
+ Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError """ _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, } _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, } _subtype_map = { @@ -1111,13 +1314,12 @@ class JobSecrets(msrest.serialization.Model): def __init__( self, - *, - dc_access_security_code: Optional["DcAccessSecurityCode"] = None, **kwargs ): super(JobSecrets, self).__init__(**kwargs) self.job_secrets_type: Optional[str] = None - self.dc_access_security_code = dc_access_security_code + self.dc_access_security_code = None + self.error = None class DataBoxDiskJobSecrets(JobSecrets): @@ -1130,8 +1332,10 @@ class DataBoxDiskJobSecrets(JobSecrets): :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". 
:type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError :ivar disk_secrets: Contains the list of secrets object for that device. :vartype disk_secrets: list[~data_box_management_client.models.DiskSecret] :ivar pass_key: PassKey for the disk Job. @@ -1142,6 +1346,8 @@ class DataBoxDiskJobSecrets(JobSecrets): _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, 'disk_secrets': {'readonly': True}, 'pass_key': {'readonly': True}, 'is_passkey_user_defined': {'readonly': True}, @@ -1150,6 +1356,7 @@ class DataBoxDiskJobSecrets(JobSecrets): _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'disk_secrets': {'key': 'diskSecrets', 'type': '[DiskSecret]'}, 'pass_key': {'key': 'passKey', 'type': 'str'}, 'is_passkey_user_defined': {'key': 'isPasskeyUserDefined', 'type': 'bool'}, @@ -1157,11 +1364,9 @@ class DataBoxDiskJobSecrets(JobSecrets): def __init__( self, - *, - dc_access_security_code: Optional["DcAccessSecurityCode"] = None, **kwargs ): - super(DataBoxDiskJobSecrets, self).__init__(dc_access_security_code=dc_access_security_code, **kwargs) + super(DataBoxDiskJobSecrets, self).__init__(**kwargs) self.job_secrets_type: str = 'DataBoxDisk' self.disk_secrets = None self.pass_key = None @@ -1178,22 +1383,27 @@ class 
DataBoxHeavyAccountCopyLogDetails(CopyLogDetails): :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator - :ivar account_name: Destination account name. + :ivar account_name: Account name. :vartype account_name: str :ivar copy_log_link: Link for copy logs. :vartype copy_log_link: list[str] + :ivar copy_verbose_log_link: Link for copy verbose logs. This will be set only when the + LogCollectionLevel is set to verbose. + :vartype copy_verbose_log_link: list[str] """ _validation = { 'copy_log_details_type': {'required': True}, 'account_name': {'readonly': True}, 'copy_log_link': {'readonly': True}, + 'copy_verbose_log_link': {'readonly': True}, } _attribute_map = { 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, 'account_name': {'key': 'accountName', 'type': 'str'}, 'copy_log_link': {'key': 'copyLogLink', 'type': '[str]'}, + 'copy_verbose_log_link': {'key': 'copyVerboseLogLink', 'type': '[str]'}, } def __init__( @@ -1204,6 +1414,7 @@ def __init__( self.copy_log_details_type: str = 'DataBoxHeavy' self.account_name = None self.copy_log_link = None + self.copy_verbose_log_link = None class DataBoxHeavyJobDetails(JobDetails): @@ -1213,24 +1424,20 @@ class DataBoxHeavyJobDetails(JobDetails): All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. :type contact_details: ~data_box_management_client.models.ContactDetails - :param shipping_address: Required. 
Shipping address of the customer. + :param shipping_address: Shipping address of the customer. :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. :vartype return_package: ~data_box_management_client.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: - list[~data_box_management_client.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~data_box_management_client.models.JobErrorDetails] + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator @@ -1242,41 +1449,50 @@ class DataBoxHeavyJobDetails(JobDetails): :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :ivar key_encryption_key: Details about which key encryption type is being used. + :vartype key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_terabytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_terabytes: int :ivar copy_progress: Copy progress per account. 
:vartype copy_progress: list[~data_box_management_client.models.CopyProgress] - :param device_password: Set Device password for unlocking Databox Heavy. + :param device_password: Set Device password for unlocking Databox Heavy. Should not be passed + for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. :type device_password: str """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'key_encryption_key': {'readonly': True}, 'copy_progress': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': 
{'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'copy_progress': {'key': 'copyProgress', 'type': '[CopyProgress]'}, 'device_password': {'key': 'devicePassword', 'type': 'str'}, } @@ -1285,14 +1501,15 @@ def __init__( self, *, contact_details: "ContactDetails", - shipping_address: "ShippingAddress", - destination_account_details: List["DestinationAccountDetails"], - expected_data_size_in_terabytes: Optional[int] = None, + shipping_address: Optional["ShippingAddress"] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, + data_export_details: Optional[List["DataExportDetails"]] = None, preferences: Optional["Preferences"] = None, + expected_data_size_in_terabytes: Optional[int] = None, device_password: Optional[str] = None, **kwargs ): - super(DataBoxHeavyJobDetails, self).__init__(expected_data_size_in_terabytes=expected_data_size_in_terabytes, contact_details=contact_details, shipping_address=shipping_address, destination_account_details=destination_account_details, preferences=preferences, **kwargs) + super(DataBoxHeavyJobDetails, self).__init__(contact_details=contact_details, shipping_address=shipping_address, data_import_details=data_import_details, data_export_details=data_export_details, preferences=preferences, expected_data_size_in_terabytes=expected_data_size_in_terabytes, **kwargs) self.job_details_type: str = 
'DataBoxHeavy' self.copy_progress = None self.device_password = device_password @@ -1308,30 +1525,33 @@ class DataBoxHeavyJobSecrets(JobSecrets): :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError :ivar cabinet_pod_secrets: Contains the list of secret objects for a databox heavy job. :vartype cabinet_pod_secrets: list[~data_box_management_client.models.DataBoxHeavySecret] """ _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, 'cabinet_pod_secrets': {'readonly': True}, } _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'cabinet_pod_secrets': {'key': 'cabinetPodSecrets', 'type': '[DataBoxHeavySecret]'}, } def __init__( self, - *, - dc_access_security_code: Optional["DcAccessSecurityCode"] = None, **kwargs ): - super(DataBoxHeavyJobSecrets, self).__init__(dc_access_security_code=dc_access_security_code, **kwargs) + super(DataBoxHeavyJobSecrets, self).__init__(**kwargs) self.job_secrets_type: str = 'DataBoxHeavy' self.cabinet_pod_secrets = None @@ -1391,24 +1611,20 @@ class DataBoxJobDetails(JobDetails): All required parameters must be 
populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. :type contact_details: ~data_box_management_client.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. + :param shipping_address: Shipping address of the customer. :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. :vartype return_package: ~data_box_management_client.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: - list[~data_box_management_client.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~data_box_management_client.models.JobErrorDetails] + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". 
:type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator @@ -1420,41 +1636,50 @@ class DataBoxJobDetails(JobDetails): :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :ivar key_encryption_key: Details about which key encryption type is being used. + :vartype key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_terabytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_terabytes: int :ivar copy_progress: Copy progress per storage account. :vartype copy_progress: list[~data_box_management_client.models.CopyProgress] - :param device_password: Set Device password for unlocking Databox. + :param device_password: Set Device password for unlocking Databox. Should not be passed for + TransferType:ExportFromAzure jobs. If this is not passed, the service will generate password + itself. This will not be returned in Get Call. Password Requirements : Password must be + minimum of 12 and maximum of 64 characters. Password must have at least one uppercase alphabet, + one number and one special character. Password cannot have the following characters : IilLoO0 + Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. 
:type device_password: str """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'key_encryption_key': {'readonly': True}, 'copy_progress': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'copy_progress': {'key': 
'copyProgress', 'type': '[CopyProgress]'}, 'device_password': {'key': 'devicePassword', 'type': 'str'}, } @@ -1463,14 +1688,15 @@ def __init__( self, *, contact_details: "ContactDetails", - shipping_address: "ShippingAddress", - destination_account_details: List["DestinationAccountDetails"], - expected_data_size_in_terabytes: Optional[int] = None, + shipping_address: Optional["ShippingAddress"] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, + data_export_details: Optional[List["DataExportDetails"]] = None, preferences: Optional["Preferences"] = None, + expected_data_size_in_terabytes: Optional[int] = None, device_password: Optional[str] = None, **kwargs ): - super(DataBoxJobDetails, self).__init__(expected_data_size_in_terabytes=expected_data_size_in_terabytes, contact_details=contact_details, shipping_address=shipping_address, destination_account_details=destination_account_details, preferences=preferences, **kwargs) + super(DataBoxJobDetails, self).__init__(contact_details=contact_details, shipping_address=shipping_address, data_import_details=data_import_details, data_export_details=data_export_details, preferences=preferences, expected_data_size_in_terabytes=expected_data_size_in_terabytes, **kwargs) self.job_details_type: str = 'DataBox' self.copy_progress = None self.device_password = device_password @@ -1479,35 +1705,41 @@ def __init__( class DataboxJobSecrets(JobSecrets): """The secrets related to a databox job. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. 
- :type dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError :param pod_secrets: Contains the list of secret objects for a job. :type pod_secrets: list[~data_box_management_client.models.DataBoxSecret] """ _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, } _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'pod_secrets': {'key': 'podSecrets', 'type': '[DataBoxSecret]'}, } def __init__( self, *, - dc_access_security_code: Optional["DcAccessSecurityCode"] = None, pod_secrets: Optional[List["DataBoxSecret"]] = None, **kwargs ): - super(DataboxJobSecrets, self).__init__(dc_access_security_code=dc_access_security_code, **kwargs) + super(DataboxJobSecrets, self).__init__(**kwargs) self.job_secrets_type: str = 'DataBox' self.pod_secrets = pod_secrets @@ -1520,13 +1752,14 @@ class ScheduleAvailabilityRequest(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by server. 
Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str """ _validation = { @@ -1537,6 +1770,7 @@ class ScheduleAvailabilityRequest(msrest.serialization.Model): _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, } _subtype_map = { @@ -1547,11 +1781,13 @@ def __init__( self, *, storage_location: str, + country: Optional[str] = None, **kwargs ): super(ScheduleAvailabilityRequest, self).__init__(**kwargs) self.storage_location = storage_location self.sku_name: Optional[str] = None + self.country = country class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): @@ -1559,13 +1795,14 @@ class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. 
+ :type country: str """ _validation = { @@ -1576,15 +1813,17 @@ class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, } def __init__( self, *, storage_location: str, + country: Optional[str] = None, **kwargs ): - super(DataBoxScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, **kwargs) + super(DataBoxScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, country=country, **kwargs) self.sku_name: str = 'DataBox' @@ -1636,276 +1875,252 @@ def __init__( self.account_credential_details = None -class DataDestinationDetailsValidationRequest(ValidationInputRequest): - """Request to validate data destination details. +class DataExportDetails(msrest.serialization.Model): + """Details of the data to be used for exporting data from azure. All required parameters must be populated in order to send to Azure. - :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator - :param destination_account_details: Required. Destination account details list. - :type destination_account_details: - list[~data_box_management_client.models.DestinationAccountDetails] - :param location: Required. Location of stamp or geo. - :type location: str + :param transfer_configuration: Required. Configuration for the data transfer. + :type transfer_configuration: ~data_box_management_client.models.TransferConfiguration + :param log_collection_level: Level of the logs to be collected. 
Possible values include: + "Error", "Verbose". + :type log_collection_level: str or ~data_box_management_client.models.LogCollectionLevel + :param account_details: Required. Account details of the data to be transferred. + :type account_details: ~data_box_management_client.models.DataAccountDetails """ _validation = { - 'validation_type': {'required': True}, - 'destination_account_details': {'required': True}, - 'location': {'required': True}, + 'transfer_configuration': {'required': True}, + 'account_details': {'required': True}, } _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'location': {'key': 'location', 'type': 'str'}, + 'transfer_configuration': {'key': 'transferConfiguration', 'type': 'TransferConfiguration'}, + 'log_collection_level': {'key': 'logCollectionLevel', 'type': 'str'}, + 'account_details': {'key': 'accountDetails', 'type': 'DataAccountDetails'}, } def __init__( self, *, - destination_account_details: List["DestinationAccountDetails"], - location: str, + transfer_configuration: "TransferConfiguration", + account_details: "DataAccountDetails", + log_collection_level: Optional[Union[str, "LogCollectionLevel"]] = None, **kwargs ): - super(DataDestinationDetailsValidationRequest, self).__init__(**kwargs) - self.validation_type: str = 'ValidateDataDestinationDetails' - self.destination_account_details = destination_account_details - self.location = location + super(DataExportDetails, self).__init__(**kwargs) + self.transfer_configuration = transfer_configuration + self.log_collection_level = log_collection_level + self.account_details = account_details -class DataDestinationDetailsValidationResponseProperties(ValidationInputResponse): - """Properties of data destination details validation response. - - Variables are only populated by the server, and will be ignored when sending a request. 
+class DataImportDetails(msrest.serialization.Model): + """Details of the data to be used for importing data to azure. All required parameters must be populated in order to send to Azure. - :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator - :ivar error: Error code and message of validation response. - :vartype error: ~data_box_management_client.models.Error - :ivar status: Data destination details validation status. Possible values include: "Valid", - "Invalid", "Skipped". - :vartype status: str or ~data_box_management_client.models.ValidationStatus + :param account_details: Required. Account details of the data to be transferred. + :type account_details: ~data_box_management_client.models.DataAccountDetails """ _validation = { - 'validation_type': {'required': True}, - 'error': {'readonly': True}, - 'status': {'readonly': True}, + 'account_details': {'required': True}, } _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, - 'status': {'key': 'status', 'type': 'str'}, + 'account_details': {'key': 'accountDetails', 'type': 'DataAccountDetails'}, } def __init__( self, + *, + account_details: "DataAccountDetails", **kwargs ): - super(DataDestinationDetailsValidationResponseProperties, self).__init__(**kwargs) - self.validation_type: str = 'ValidateDataDestinationDetails' - self.status = None + super(DataImportDetails, self).__init__(**kwargs) + self.account_details = account_details -class DcAccessSecurityCode(msrest.serialization.Model): - """Dc Access Security code for device. 
+class DataLocationToServiceLocationMap(msrest.serialization.Model): + """Map of data location to service location. - :param forward_dc_access_code: Dc Access Code for dispatching from DC. - :type forward_dc_access_code: str - :param reverse_dc_access_code: Dc Access code for dropping off at DC. - :type reverse_dc_access_code: str + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar data_location: Location of the data. + :vartype data_location: str + :ivar service_location: Location of the service. + :vartype service_location: str """ + _validation = { + 'data_location': {'readonly': True}, + 'service_location': {'readonly': True}, + } + _attribute_map = { - 'forward_dc_access_code': {'key': 'forwardDcAccessCode', 'type': 'str'}, - 'reverse_dc_access_code': {'key': 'reverseDcAccessCode', 'type': 'str'}, + 'data_location': {'key': 'dataLocation', 'type': 'str'}, + 'service_location': {'key': 'serviceLocation', 'type': 'str'}, } def __init__( self, - *, - forward_dc_access_code: Optional[str] = None, - reverse_dc_access_code: Optional[str] = None, **kwargs ): - super(DcAccessSecurityCode, self).__init__(**kwargs) - self.forward_dc_access_code = forward_dc_access_code - self.reverse_dc_access_code = reverse_dc_access_code - + super(DataLocationToServiceLocationMap, self).__init__(**kwargs) + self.data_location = None + self.service_location = None -class DestinationAccountDetails(msrest.serialization.Model): - """Details of the destination storage accounts. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DestinationManagedDiskDetails, DestinationStorageAccountDetails. +class DataTransferDetailsValidationRequest(ValidationInputRequest): + """Request to validate export and import data details. All required parameters must be populated in order to send to Azure. - :param data_destination_type: Required. Data Destination Type.Constant filled by server. 
- Possible values include: "StorageAccount", "ManagedDisk". - :type data_destination_type: str or ~data_box_management_client.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. - :type share_password: str + :param validation_type: Required. Identifies the type of validation request.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :param data_export_details: List of DataTransfer details to be used to export data from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] + :param data_import_details: List of DataTransfer details to be used to import data to azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param device_type: Required. Device type. Possible values include: "DataBox", "DataBoxDisk", + "DataBoxHeavy". + :type device_type: str or ~data_box_management_client.models.SkuName + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". 
+ :type transfer_type: str or ~data_box_management_client.models.TransferType """ _validation = { - 'data_destination_type': {'required': True}, + 'validation_type': {'required': True}, + 'device_type': {'required': True}, + 'transfer_type': {'required': True}, } _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - } - - _subtype_map = { - 'data_destination_type': {'ManagedDisk': 'DestinationManagedDiskDetails', 'StorageAccount': 'DestinationStorageAccountDetails'} + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'device_type': {'key': 'deviceType', 'type': 'str'}, + 'transfer_type': {'key': 'transferType', 'type': 'str'}, } def __init__( self, *, - account_id: Optional[str] = None, - share_password: Optional[str] = None, + device_type: Union[str, "SkuName"], + transfer_type: Union[str, "TransferType"], + data_export_details: Optional[List["DataExportDetails"]] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, **kwargs ): - super(DestinationAccountDetails, self).__init__(**kwargs) - self.data_destination_type: Optional[str] = None - self.account_id = account_id - self.share_password = share_password + super(DataTransferDetailsValidationRequest, self).__init__(**kwargs) + self.validation_type: str = 'ValidateDataTransferDetails' + self.data_export_details = data_export_details + self.data_import_details = data_import_details + self.device_type = device_type + self.transfer_type = transfer_type + +class DataTransferDetailsValidationResponseProperties(ValidationInputResponse): + """Properties of data transfer details validation response. 
-class DestinationManagedDiskDetails(DestinationAccountDetails): - """Details for the destination compute disks. + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". - :type data_destination_type: str or ~data_box_management_client.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. - :type share_password: str - :param resource_group_id: Required. Destination Resource Group Id where the Compute disks - should be created. - :type resource_group_id: str - :param staging_storage_account_id: Required. Arm Id of the storage account that can be used to - copy the vhd for staging. - :type staging_storage_account_id: str + :param validation_type: Required. Identifies the type of validation response.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :ivar error: Error code and message of validation response. + :vartype error: ~data_box_management_client.models.CloudError + :ivar status: Data transfer details validation status. Possible values include: "Valid", + "Invalid", "Skipped". 
+ :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { - 'data_destination_type': {'required': True}, - 'resource_group_id': {'required': True}, - 'staging_storage_account_id': {'required': True}, + 'validation_type': {'required': True}, + 'error': {'readonly': True}, + 'status': {'readonly': True}, } _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - 'resource_group_id': {'key': 'resourceGroupId', 'type': 'str'}, - 'staging_storage_account_id': {'key': 'stagingStorageAccountId', 'type': 'str'}, + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + 'status': {'key': 'status', 'type': 'str'}, } def __init__( self, - *, - resource_group_id: str, - staging_storage_account_id: str, - account_id: Optional[str] = None, - share_password: Optional[str] = None, **kwargs ): - super(DestinationManagedDiskDetails, self).__init__(account_id=account_id, share_password=share_password, **kwargs) - self.data_destination_type: str = 'ManagedDisk' - self.resource_group_id = resource_group_id - self.staging_storage_account_id = staging_storage_account_id - + super(DataTransferDetailsValidationResponseProperties, self).__init__(**kwargs) + self.validation_type: str = 'ValidateDataTransferDetails' + self.status = None -class DestinationStorageAccountDetails(DestinationAccountDetails): - """Details for the destination storage account. - All required parameters must be populated in order to send to Azure. +class DcAccessSecurityCode(msrest.serialization.Model): + """Dc access security code. - :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". 
- :type data_destination_type: str or ~data_box_management_client.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. - :type share_password: str - :param storage_account_id: Required. Destination Storage Account Arm Id. - :type storage_account_id: str + :param reverse_dc_access_code: Reverse Dc access security code. + :type reverse_dc_access_code: str + :param forward_dc_access_code: Forward Dc access security code. + :type forward_dc_access_code: str """ - _validation = { - 'data_destination_type': {'required': True}, - 'storage_account_id': {'required': True}, - } - _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - 'storage_account_id': {'key': 'storageAccountId', 'type': 'str'}, + 'reverse_dc_access_code': {'key': 'reverseDcAccessCode', 'type': 'str'}, + 'forward_dc_access_code': {'key': 'forwardDcAccessCode', 'type': 'str'}, } def __init__( self, *, - storage_account_id: str, - account_id: Optional[str] = None, - share_password: Optional[str] = None, + reverse_dc_access_code: Optional[str] = None, + forward_dc_access_code: Optional[str] = None, **kwargs ): - super(DestinationStorageAccountDetails, self).__init__(account_id=account_id, share_password=share_password, **kwargs) - self.data_destination_type: str = 'StorageAccount' - self.storage_account_id = storage_account_id + super(DcAccessSecurityCode, self).__init__(**kwargs) + self.reverse_dc_access_code = reverse_dc_access_code + self.forward_dc_access_code = forward_dc_access_code -class DestinationToServiceLocationMap(msrest.serialization.Model): - """Map of destination location to service location. +class Details(msrest.serialization.Model): + """Details. 
- Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar destination_location: Location of the destination. - :vartype destination_location: str - :ivar service_location: Location of the service. - :vartype service_location: str + :param code: Required. + :type code: str + :param message: Required. + :type message: str """ _validation = { - 'destination_location': {'readonly': True}, - 'service_location': {'readonly': True}, + 'code': {'required': True}, + 'message': {'required': True}, } _attribute_map = { - 'destination_location': {'key': 'destinationLocation', 'type': 'str'}, - 'service_location': {'key': 'serviceLocation', 'type': 'str'}, + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, } def __init__( self, + *, + code: str, + message: str, **kwargs ): - super(DestinationToServiceLocationMap, self).__init__(**kwargs) - self.destination_location = None - self.service_location = None + super(Details, self).__init__(**kwargs) + self.code = code + self.message = message class DiskScheduleAvailabilityRequest(ScheduleAvailabilityRequest): @@ -1913,13 +2128,14 @@ class DiskScheduleAvailabilityRequest(ScheduleAvailabilityRequest): All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". 
:type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str :param expected_data_size_in_terabytes: Required. The expected size of the data, which needs to be transferred in this job, in terabytes. :type expected_data_size_in_terabytes: int @@ -1934,6 +2150,7 @@ class DiskScheduleAvailabilityRequest(ScheduleAvailabilityRequest): _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, } @@ -1942,9 +2159,10 @@ def __init__( *, storage_location: str, expected_data_size_in_terabytes: int, + country: Optional[str] = None, **kwargs ): - super(DiskScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, **kwargs) + super(DiskScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, country=country, **kwargs) self.sku_name: str = 'DataBoxDisk' self.expected_data_size_in_terabytes = expected_data_size_in_terabytes @@ -1980,113 +2198,126 @@ def __init__( self.bit_locker_key = None -class Error(msrest.serialization.Model): - """Top level error for the job. +class ErrorDetail(msrest.serialization.Model): + """ErrorDetail. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar code: Error code that can be used to programmatically identify the error. - :vartype code: str - :ivar message: Describes the error in detail and provides debugging information. - :vartype message: str + :param code: Required. + :type code: str + :param message: Required. 
+ :type message: str + :param details: + :type details: list[~data_box_management_client.models.Details] + :param target: + :type target: str """ _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, + 'code': {'required': True}, + 'message': {'required': True}, } _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[Details]'}, + 'target': {'key': 'target', 'type': 'str'}, } def __init__( self, + *, + code: str, + message: str, + details: Optional[List["Details"]] = None, + target: Optional[str] = None, **kwargs ): - super(Error, self).__init__(**kwargs) - self.code = None - self.message = None + super(ErrorDetail, self).__init__(**kwargs) + self.code = code + self.message = message + self.details = details + self.target = target -class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest): - """Request body to get the availability for scheduling heavy orders. +class FilterFileDetails(msrest.serialization.Model): + """Details of the filter files to be used for data transfer. All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. - :type storage_location: str - :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~data_box_management_client.models.SkuName + :param filter_file_type: Required. Type of the filter file. Possible values include: + "AzureBlob", "AzureFile". + :type filter_file_type: str or ~data_box_management_client.models.FilterFileType + :param filter_file_path: Required. Path of the file that contains the details of all items to + transfer. 
+ :type filter_file_path: str """ _validation = { - 'storage_location': {'required': True}, - 'sku_name': {'required': True}, + 'filter_file_type': {'required': True}, + 'filter_file_path': {'required': True}, } _attribute_map = { - 'storage_location': {'key': 'storageLocation', 'type': 'str'}, - 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'filter_file_type': {'key': 'filterFileType', 'type': 'str'}, + 'filter_file_path': {'key': 'filterFilePath', 'type': 'str'}, } def __init__( self, *, - storage_location: str, + filter_file_type: Union[str, "FilterFileType"], + filter_file_path: str, **kwargs ): - super(HeavyScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, **kwargs) - self.sku_name: str = 'DataBoxHeavy' + super(FilterFileDetails, self).__init__(**kwargs) + self.filter_file_type = filter_file_type + self.filter_file_path = filter_file_path -class JobErrorDetails(msrest.serialization.Model): - """Job Error Details for providing the information and recommended action. +class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest): + """Request body to get the availability for scheduling heavy orders. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar error_message: Message for the error. - :vartype error_message: str - :ivar error_code: Code for the error. - :vartype error_code: int - :ivar recommended_action: Recommended action for the error. - :vartype recommended_action: str - :ivar exception_message: Contains the non localized exception message. - :vartype exception_message: str + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. + :type storage_location: str + :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by + server. 
Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str """ _validation = { - 'error_message': {'readonly': True}, - 'error_code': {'readonly': True}, - 'recommended_action': {'readonly': True}, - 'exception_message': {'readonly': True}, + 'storage_location': {'required': True}, + 'sku_name': {'required': True}, } _attribute_map = { - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'int'}, - 'recommended_action': {'key': 'recommendedAction', 'type': 'str'}, - 'exception_message': {'key': 'exceptionMessage', 'type': 'str'}, + 'storage_location': {'key': 'storageLocation', 'type': 'str'}, + 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, } def __init__( self, + *, + storage_location: str, + country: Optional[str] = None, **kwargs ): - super(JobErrorDetails, self).__init__(**kwargs) - self.error_message = None - self.error_code = None - self.recommended_action = None - self.exception_message = None + super(HeavyScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, country=country, **kwargs) + self.sku_name: str = 'DataBoxHeavy' class Resource(msrest.serialization.Model): """Model of the Resource. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. :param location: Required. The location of the resource. This will be one of the supported and @@ -2099,17 +2330,28 @@ class Resource(msrest.serialization.Model): :type tags: dict[str, str] :param sku: Required. The sku type. :type sku: ~data_box_management_client.models.Sku + :param type: Identity type. + :type type: str + :ivar principal_id: Service Principal Id backing the Msi. 
+ :vartype principal_id: str + :ivar tenant_id: Home Tenant Id. + :vartype tenant_id: str """ _validation = { 'location': {'required': True}, 'sku': {'required': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, } _attribute_map = { 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'sku', 'type': 'Sku'}, + 'type': {'key': 'identity.type', 'type': 'str'}, + 'principal_id': {'key': 'identity.principalId', 'type': 'str'}, + 'tenant_id': {'key': 'identity.tenantId', 'type': 'str'}, } def __init__( @@ -2118,12 +2360,16 @@ def __init__( location: str, sku: "Sku", tags: Optional[Dict[str, str]] = None, + type: Optional[str] = None, **kwargs ): super(Resource, self).__init__(**kwargs) self.location = location self.tags = tags self.sku = sku + self.type = type + self.principal_id = None + self.tenant_id = None class JobResource(Resource): @@ -2143,18 +2389,27 @@ class JobResource(Resource): :type tags: dict[str, str] :param sku: Required. The sku type. :type sku: ~data_box_management_client.models.Sku + :ivar principal_id: Service Principal Id backing the Msi. + :vartype principal_id: str + :ivar tenant_id: Home Tenant Id. + :vartype tenant_id: str :ivar name: Name of the object. :vartype name: str :ivar id: Id of the object. :vartype id: str :ivar type: Type of the object. :vartype type: str + :param transfer_type: Required. Type of the data transfer. Possible values include: + "ImportToAzure", "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType :ivar is_cancellable: Describes whether the job is cancellable or not. :vartype is_cancellable: bool :ivar is_deletable: Describes whether the job is deletable or not. :vartype is_deletable: bool :ivar is_shipping_address_editable: Describes whether the shipping address is editable or not. 
:vartype is_shipping_address_editable: bool + :ivar is_prepare_to_ship_enabled: Is Prepare To Ship Enabled on this job. + :vartype is_prepare_to_ship_enabled: bool :ivar status: Name of the stage which is in progress. Possible values include: "DeviceOrdered", "DevicePrepared", "Dispatched", "Delivered", "PickedUp", "AtAzureDC", "DataCopy", "Completed", "CompletedWithErrors", "Cancelled", "Failed_IssueReportedAtCustomer", @@ -2164,7 +2419,7 @@ class JobResource(Resource): :ivar start_time: Time at which the job was started in UTC ISO 8601 format. :vartype start_time: ~datetime.datetime :ivar error: Top level error for the job. - :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError :param details: Details of a job run. This field will only be sent for expand details filter. :type details: ~data_box_management_client.models.JobDetails :ivar cancellation_reason: Reason for cancellation. @@ -2181,12 +2436,16 @@ class JobResource(Resource): _validation = { 'location': {'required': True}, 'sku': {'required': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, 'name': {'readonly': True}, 'id': {'readonly': True}, 'type': {'readonly': True}, + 'transfer_type': {'required': True}, 'is_cancellable': {'readonly': True}, 'is_deletable': {'readonly': True}, 'is_shipping_address_editable': {'readonly': True}, + 'is_prepare_to_ship_enabled': {'readonly': True}, 'status': {'readonly': True}, 'start_time': {'readonly': True}, 'error': {'readonly': True}, @@ -2198,15 +2457,19 @@ class JobResource(Resource): 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'sku', 'type': 'Sku'}, + 'principal_id': {'key': 'identity.principalId', 'type': 'str'}, + 'tenant_id': {'key': 'identity.tenantId', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'transfer_type': 
{'key': 'properties.transferType', 'type': 'str'}, 'is_cancellable': {'key': 'properties.isCancellable', 'type': 'bool'}, 'is_deletable': {'key': 'properties.isDeletable', 'type': 'bool'}, 'is_shipping_address_editable': {'key': 'properties.isShippingAddressEditable', 'type': 'bool'}, + 'is_prepare_to_ship_enabled': {'key': 'properties.isPrepareToShipEnabled', 'type': 'bool'}, 'status': {'key': 'properties.status', 'type': 'str'}, 'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'}, - 'error': {'key': 'properties.error', 'type': 'Error'}, + 'error': {'key': 'properties.error', 'type': 'CloudError'}, 'details': {'key': 'properties.details', 'type': 'JobDetails'}, 'cancellation_reason': {'key': 'properties.cancellationReason', 'type': 'str'}, 'delivery_type': {'key': 'properties.deliveryType', 'type': 'str'}, @@ -2219,6 +2482,7 @@ def __init__( *, location: str, sku: "Sku", + transfer_type: Union[str, "TransferType"], tags: Optional[Dict[str, str]] = None, details: Optional["JobDetails"] = None, delivery_type: Optional[Union[str, "JobDeliveryType"]] = None, @@ -2229,9 +2493,11 @@ def __init__( self.name = None self.id = None self.type = None + self.transfer_type = transfer_type self.is_cancellable = None self.is_deletable = None self.is_shipping_address_editable = None + self.is_prepare_to_ship_enabled = None self.status = None self.start_time = None self.error = None @@ -2271,34 +2537,48 @@ def __init__( class JobResourceUpdateParameter(msrest.serialization.Model): """The JobResourceUpdateParameter. + Variables are only populated by the server, and will be ignored when sending a request. + :param tags: A set of tags. The list of key value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). :type tags: dict[str, str] + :param type: Identity type. + :type type: str + :ivar principal_id: Service Principal Id backing the Msi. + :vartype principal_id: str + :ivar tenant_id: Home Tenant Id. 
+ :vartype tenant_id: str :param details: Details of a job to be updated. :type details: ~data_box_management_client.models.UpdateJobDetails - :param destination_account_details: Destination account details. - :type destination_account_details: - list[~data_box_management_client.models.DestinationAccountDetails] """ + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, + 'type': {'key': 'identity.type', 'type': 'str'}, + 'principal_id': {'key': 'identity.principalId', 'type': 'str'}, + 'tenant_id': {'key': 'identity.tenantId', 'type': 'str'}, 'details': {'key': 'properties.details', 'type': 'UpdateJobDetails'}, - 'destination_account_details': {'key': 'properties.destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, } def __init__( self, *, tags: Optional[Dict[str, str]] = None, + type: Optional[str] = None, details: Optional["UpdateJobDetails"] = None, - destination_account_details: Optional[List["DestinationAccountDetails"]] = None, **kwargs ): super(JobResourceUpdateParameter, self).__init__(**kwargs) self.tags = tags + self.type = type + self.principal_id = None + self.tenant_id = None self.details = details - self.destination_account_details = destination_account_details class JobStages(msrest.serialization.Model): @@ -2315,14 +2595,13 @@ class JobStages(msrest.serialization.Model): :ivar display_name: Display name of the job stage. :vartype display_name: str :ivar stage_status: Status of the job stage. Possible values include: "None", "InProgress", - "Succeeded", "Failed", "Cancelled", "Cancelling", "SucceededWithErrors". + "Succeeded", "Failed", "Cancelled", "Cancelling", "SucceededWithErrors", + "WaitingForCustomerAction", "SucceededWithWarnings". :vartype stage_status: str or ~data_box_management_client.models.StageStatus :ivar stage_time: Time for the job stage in UTC ISO 8601 format. 
:vartype stage_time: ~datetime.datetime :ivar job_stage_details: Job Stage Details. :vartype job_stage_details: object - :ivar error_details: Error details for the stage. - :vartype error_details: list[~data_box_management_client.models.JobErrorDetails] """ _validation = { @@ -2331,7 +2610,6 @@ class JobStages(msrest.serialization.Model): 'stage_status': {'readonly': True}, 'stage_time': {'readonly': True}, 'job_stage_details': {'readonly': True}, - 'error_details': {'readonly': True}, } _attribute_map = { @@ -2340,7 +2618,6 @@ class JobStages(msrest.serialization.Model): 'stage_status': {'key': 'stageStatus', 'type': 'str'}, 'stage_time': {'key': 'stageTime', 'type': 'iso-8601'}, 'job_stage_details': {'key': 'jobStageDetails', 'type': 'object'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, } def __init__( @@ -2353,7 +2630,94 @@ def __init__( self.stage_status = None self.stage_time = None self.job_stage_details = None - self.error_details = None + + +class KeyEncryptionKey(msrest.serialization.Model): + """Encryption key containing details about key to encrypt different keys. + + All required parameters must be populated in order to send to Azure. + + :param kek_type: Required. Type of encryption key used for key encryption. Possible values + include: "MicrosoftManaged", "CustomerManaged". + :type kek_type: str or ~data_box_management_client.models.KekType + :param kek_url: Key encryption key. It is required in case of Customer managed KekType. + :type kek_url: str + :param kek_vault_resource_id: Kek vault resource id. It is required in case of Customer managed + KekType. 
+ :type kek_vault_resource_id: str + """ + + _validation = { + 'kek_type': {'required': True}, + } + + _attribute_map = { + 'kek_type': {'key': 'kekType', 'type': 'str'}, + 'kek_url': {'key': 'kekUrl', 'type': 'str'}, + 'kek_vault_resource_id': {'key': 'kekVaultResourceID', 'type': 'str'}, + } + + def __init__( + self, + *, + kek_type: Union[str, "KekType"], + kek_url: Optional[str] = None, + kek_vault_resource_id: Optional[str] = None, + **kwargs + ): + super(KeyEncryptionKey, self).__init__(**kwargs) + self.kek_type = kek_type + self.kek_url = kek_url + self.kek_vault_resource_id = kek_vault_resource_id + + +class ManagedDiskDetails(DataAccountDetails): + """Details of the managed disks. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + :param resource_group_id: Required. Resource Group Id of the compute disks. + :type resource_group_id: str + :param staging_storage_account_id: Required. Resource Id of the storage account that can be + used to copy the vhd for staging. 
+ :type staging_storage_account_id: str + """ + + _validation = { + 'data_account_type': {'required': True}, + 'resource_group_id': {'required': True}, + 'staging_storage_account_id': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + 'resource_group_id': {'key': 'resourceGroupId', 'type': 'str'}, + 'staging_storage_account_id': {'key': 'stagingStorageAccountId', 'type': 'str'}, + } + + def __init__( + self, + *, + resource_group_id: str, + staging_storage_account_id: str, + share_password: Optional[str] = None, + **kwargs + ): + super(ManagedDiskDetails, self).__init__(share_password=share_password, **kwargs) + self.data_account_type: str = 'ManagedDisk' + self.resource_group_id = resource_group_id + self.staging_storage_account_id = staging_storage_account_id class NotificationPreference(msrest.serialization.Model): @@ -2404,6 +2768,8 @@ class Operation(msrest.serialization.Model): :vartype properties: object :ivar origin: Origin of the operation. Can be : user|system|user,system. :vartype origin: str + :param is_data_action: Indicates whether the operation is a data action. + :type is_data_action: bool """ _validation = { @@ -2418,10 +2784,13 @@ class Operation(msrest.serialization.Model): 'display': {'key': 'display', 'type': 'OperationDisplay'}, 'properties': {'key': 'properties', 'type': 'object'}, 'origin': {'key': 'origin', 'type': 'str'}, + 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, } def __init__( self, + *, + is_data_action: Optional[bool] = None, **kwargs ): super(Operation, self).__init__(**kwargs) @@ -2429,6 +2798,7 @@ def __init__( self.display = None self.properties = None self.origin = None + self.is_data_action = is_data_action class OperationDisplay(msrest.serialization.Model): @@ -2536,7 +2906,7 @@ def __init__( class Preferences(msrest.serialization.Model): """Preferences related to the order. 
- :param preferred_data_center_region: Preferred Data Center Region. + :param preferred_data_center_region: Preferred data center region. :type preferred_data_center_region: list[str] :param transport_preferences: Preferences related to the shipment logistics of the sku. :type transport_preferences: ~data_box_management_client.models.TransportPreferences @@ -2565,11 +2935,11 @@ class PreferencesValidationRequest(ValidationInputRequest): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator - :param preference: Preference requested with respect to transport type and data center. + :param preference: Preference of transport and data center. :type preference: ~data_box_management_client.models.Preferences :param device_type: Required. Device type to be used for the job. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". @@ -2608,12 +2978,12 @@ class PreferencesValidationResponseProperties(ValidationInputResponse): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". 
:type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Validation status of requested data center and transport. Possible values include: "Valid", "Invalid", "Skipped". :vartype status: str or ~data_box_management_client.models.ValidationStatus @@ -2627,7 +2997,7 @@ class PreferencesValidationResponseProperties(ValidationInputResponse): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -2702,7 +3072,7 @@ def __init__( class ScheduleAvailabilityResponse(msrest.serialization.Model): - """Schedule availability response for given sku in a region. + """Schedule availability for given sku in a region. Variables are only populated by the server, and will be ignored when sending a request. @@ -2961,20 +3331,19 @@ def __init__( class SkuAvailabilityValidationRequest(ValidationInputRequest): """Request to validate sku availability. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :param device_type: Required. Device type to be used for the job. 
Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". :type device_type: str or ~data_box_management_client.models.SkuName - :ivar transfer_type: Required. Type of the transfer. Default value: "ImportToAzure". - :vartype transfer_type: str + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType :param country: Required. ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. :type country: str @@ -2986,7 +3355,7 @@ class SkuAvailabilityValidationRequest(ValidationInputRequest): _validation = { 'validation_type': {'required': True}, 'device_type': {'required': True}, - 'transfer_type': {'required': True, 'constant': True}, + 'transfer_type': {'required': True}, 'country': {'required': True}, 'location': {'required': True}, } @@ -2999,12 +3368,11 @@ class SkuAvailabilityValidationRequest(ValidationInputRequest): 'location': {'key': 'location', 'type': 'str'}, } - transfer_type = "ImportToAzure" - def __init__( self, *, device_type: Union[str, "SkuName"], + transfer_type: Union[str, "TransferType"], country: str, location: str, **kwargs @@ -3012,6 +3380,7 @@ def __init__( super(SkuAvailabilityValidationRequest, self).__init__(**kwargs) self.validation_type: str = 'ValidateSkuAvailability' self.device_type = device_type + self.transfer_type = transfer_type self.country = country self.location = location @@ -3024,12 +3393,12 @@ class SkuAvailabilityValidationResponseProperties(ValidationInputResponse): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. 
Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Sku availability validation status. Possible values include: "Valid", "Invalid", "Skipped". :vartype status: str or ~data_box_management_client.models.ValidationStatus @@ -3043,7 +3412,7 @@ class SkuAvailabilityValidationResponseProperties(ValidationInputResponse): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -3095,16 +3464,22 @@ class SkuCost(msrest.serialization.Model): :vartype meter_id: str :ivar meter_type: The type of the meter. :vartype meter_type: str + :ivar multiplier: Multiplier specifies the region specific value to be multiplied with 1$ guid. + Eg: Our new regions will be using 1$ shipping guid with appropriate multiplier specific to + region. 
+ :vartype multiplier: float """ _validation = { 'meter_id': {'readonly': True}, 'meter_type': {'readonly': True}, + 'multiplier': {'readonly': True}, } _attribute_map = { 'meter_id': {'key': 'meterId', 'type': 'str'}, 'meter_type': {'key': 'meterType', 'type': 'str'}, + 'multiplier': {'key': 'multiplier', 'type': 'float'}, } def __init__( @@ -3114,6 +3489,7 @@ def __init__( super(SkuCost, self).__init__(**kwargs) self.meter_id = None self.meter_type = None + self.multiplier = None class SkuInformation(msrest.serialization.Model): @@ -3125,9 +3501,9 @@ class SkuInformation(msrest.serialization.Model): :vartype sku: ~data_box_management_client.models.Sku :ivar enabled: The sku is enabled or not. :vartype enabled: bool - :ivar destination_to_service_location_map: The map of destination location to service location. - :vartype destination_to_service_location_map: - list[~data_box_management_client.models.DestinationToServiceLocationMap] + :ivar data_location_to_service_location_map: The map of data location to service location. + :vartype data_location_to_service_location_map: + list[~data_box_management_client.models.DataLocationToServiceLocationMap] :ivar capacity: Capacity of the Sku. :vartype capacity: ~data_box_management_client.models.SkuCapacity :ivar costs: Cost of the Sku. 
@@ -3146,7 +3522,7 @@ class SkuInformation(msrest.serialization.Model): _validation = { 'sku': {'readonly': True}, 'enabled': {'readonly': True}, - 'destination_to_service_location_map': {'readonly': True}, + 'data_location_to_service_location_map': {'readonly': True}, 'capacity': {'readonly': True}, 'costs': {'readonly': True}, 'api_versions': {'readonly': True}, @@ -3158,7 +3534,7 @@ class SkuInformation(msrest.serialization.Model): _attribute_map = { 'sku': {'key': 'sku', 'type': 'Sku'}, 'enabled': {'key': 'enabled', 'type': 'bool'}, - 'destination_to_service_location_map': {'key': 'properties.destinationToServiceLocationMap', 'type': '[DestinationToServiceLocationMap]'}, + 'data_location_to_service_location_map': {'key': 'properties.dataLocationToServiceLocationMap', 'type': '[DataLocationToServiceLocationMap]'}, 'capacity': {'key': 'properties.capacity', 'type': 'SkuCapacity'}, 'costs': {'key': 'properties.costs', 'type': '[SkuCost]'}, 'api_versions': {'key': 'properties.apiVersions', 'type': '[str]'}, @@ -3174,7 +3550,7 @@ def __init__( super(SkuInformation, self).__init__(**kwargs) self.sku = None self.enabled = None - self.destination_to_service_location_map = None + self.data_location_to_service_location_map = None self.capacity = None self.costs = None self.api_versions = None @@ -3183,15 +3559,57 @@ def __init__( self.required_feature = None +class StorageAccountDetails(DataAccountDetails): + """Details for the storage account. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. 
If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + :param storage_account_id: Required. Storage Account Resource Id. + :type storage_account_id: str + """ + + _validation = { + 'data_account_type': {'required': True}, + 'storage_account_id': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + 'storage_account_id': {'key': 'storageAccountId', 'type': 'str'}, + } + + def __init__( + self, + *, + storage_account_id: str, + share_password: Optional[str] = None, + **kwargs + ): + super(StorageAccountDetails, self).__init__(share_password=share_password, **kwargs) + self.data_account_type: str = 'StorageAccount' + self.storage_account_id = storage_account_id + + class SubscriptionIsAllowedToCreateJobValidationRequest(ValidationInputRequest): """Request to validate subscription permission to create jobs. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". 
:type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator """ @@ -3219,12 +3637,12 @@ class SubscriptionIsAllowedToCreateJobValidationResponseProperties(ValidationInp All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~data_box_management_client.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Validation status of subscription permission to create job. Possible values include: "Valid", "Invalid", "Skipped". :vartype status: str or ~data_box_management_client.models.ValidationStatus @@ -3238,7 +3656,7 @@ class SubscriptionIsAllowedToCreateJobValidationResponseProperties(ValidationInp _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -3251,6 +3669,172 @@ def __init__( self.status = None +class TransferAllDetails(msrest.serialization.Model): + """Details to transfer all data. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Type of the account of data. Possible values include: + "StorageAccount", "ManagedDisk". 
+ :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param transfer_all_blobs: To indicate if all Azure blobs have to be transferred. + :type transfer_all_blobs: bool + :param transfer_all_files: To indicate if all Azure Files have to be transferred. + :type transfer_all_files: bool + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'transfer_all_blobs': {'key': 'transferAllBlobs', 'type': 'bool'}, + 'transfer_all_files': {'key': 'transferAllFiles', 'type': 'bool'}, + } + + def __init__( + self, + *, + data_account_type: Union[str, "DataAccountType"], + transfer_all_blobs: Optional[bool] = None, + transfer_all_files: Optional[bool] = None, + **kwargs + ): + super(TransferAllDetails, self).__init__(**kwargs) + self.data_account_type = data_account_type + self.transfer_all_blobs = transfer_all_blobs + self.transfer_all_files = transfer_all_files + + +class TransferConfiguration(msrest.serialization.Model): + """Configuration for defining the transfer of data. + + All required parameters must be populated in order to send to Azure. + + :param transfer_configuration_type: Required. Type of the configuration for transfer. Possible + values include: "TransferAll", "TransferUsingFilter". + :type transfer_configuration_type: str or + ~data_box_management_client.models.TransferConfigurationType + :param transfer_filter_details: Map of filter type and the details to filter. This field is + required only if the TransferConfigurationType is given as TransferUsingFilter. + :type transfer_filter_details: + ~data_box_management_client.models.TransferConfigurationTransferFilterDetails + :param transfer_all_details: Map of filter type and the details to transfer all data. This + field is required only if the TransferConfigurationType is given as TransferAll. 
+ :type transfer_all_details: + ~data_box_management_client.models.TransferConfigurationTransferAllDetails + """ + + _validation = { + 'transfer_configuration_type': {'required': True}, + } + + _attribute_map = { + 'transfer_configuration_type': {'key': 'transferConfigurationType', 'type': 'str'}, + 'transfer_filter_details': {'key': 'transferFilterDetails', 'type': 'TransferConfigurationTransferFilterDetails'}, + 'transfer_all_details': {'key': 'transferAllDetails', 'type': 'TransferConfigurationTransferAllDetails'}, + } + + def __init__( + self, + *, + transfer_configuration_type: Union[str, "TransferConfigurationType"], + transfer_filter_details: Optional["TransferConfigurationTransferFilterDetails"] = None, + transfer_all_details: Optional["TransferConfigurationTransferAllDetails"] = None, + **kwargs + ): + super(TransferConfiguration, self).__init__(**kwargs) + self.transfer_configuration_type = transfer_configuration_type + self.transfer_filter_details = transfer_filter_details + self.transfer_all_details = transfer_all_details + + +class TransferConfigurationTransferAllDetails(msrest.serialization.Model): + """Map of filter type and the details to transfer all data. This field is required only if the TransferConfigurationType is given as TransferAll. + + :param include: Details to transfer all data. + :type include: ~data_box_management_client.models.TransferAllDetails + """ + + _attribute_map = { + 'include': {'key': 'include', 'type': 'TransferAllDetails'}, + } + + def __init__( + self, + *, + include: Optional["TransferAllDetails"] = None, + **kwargs + ): + super(TransferConfigurationTransferAllDetails, self).__init__(**kwargs) + self.include = include + + +class TransferConfigurationTransferFilterDetails(msrest.serialization.Model): + """Map of filter type and the details to filter. This field is required only if the TransferConfigurationType is given as TransferUsingFilter. + + :param include: Details of the filtering the transfer of data. 
+ :type include: ~data_box_management_client.models.TransferFilterDetails + """ + + _attribute_map = { + 'include': {'key': 'include', 'type': 'TransferFilterDetails'}, + } + + def __init__( + self, + *, + include: Optional["TransferFilterDetails"] = None, + **kwargs + ): + super(TransferConfigurationTransferFilterDetails, self).__init__(**kwargs) + self.include = include + + +class TransferFilterDetails(msrest.serialization.Model): + """Details of the filtering the transfer of data. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Type of the account of data. Possible values include: + "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param blob_filter_details: Filter details to transfer blobs. + :type blob_filter_details: ~data_box_management_client.models.BlobFilterDetails + :param azure_file_filter_details: Filter details to transfer Azure files. + :type azure_file_filter_details: ~data_box_management_client.models.AzureFileFilterDetails + :param filter_file_details: Details of the filter files to be used for data transfer. 
+ :type filter_file_details: list[~data_box_management_client.models.FilterFileDetails] + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'blob_filter_details': {'key': 'blobFilterDetails', 'type': 'BlobFilterDetails'}, + 'azure_file_filter_details': {'key': 'azureFileFilterDetails', 'type': 'AzureFileFilterDetails'}, + 'filter_file_details': {'key': 'filterFileDetails', 'type': '[FilterFileDetails]'}, + } + + def __init__( + self, + *, + data_account_type: Union[str, "DataAccountType"], + blob_filter_details: Optional["BlobFilterDetails"] = None, + azure_file_filter_details: Optional["AzureFileFilterDetails"] = None, + filter_file_details: Optional[List["FilterFileDetails"]] = None, + **kwargs + ): + super(TransferFilterDetails, self).__init__(**kwargs) + self.data_account_type = data_account_type + self.blob_filter_details = blob_filter_details + self.azure_file_filter_details = azure_file_filter_details + self.filter_file_details = filter_file_details + + class TransportAvailabilityDetails(msrest.serialization.Model): """Transport options availability details for given region. @@ -3394,11 +3978,14 @@ class UpdateJobDetails(msrest.serialization.Model): :type contact_details: ~data_box_management_client.models.ContactDetails :param shipping_address: Shipping address of the customer. :type shipping_address: ~data_box_management_client.models.ShippingAddress + :param key_encryption_key: Key encryption key for the job. 
+ :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey """ _attribute_map = { 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, } def __init__( @@ -3406,11 +3993,13 @@ def __init__( *, contact_details: Optional["ContactDetails"] = None, shipping_address: Optional["ShippingAddress"] = None, + key_encryption_key: Optional["KeyEncryptionKey"] = None, **kwargs ): super(UpdateJobDetails, self).__init__(**kwargs) self.contact_details = contact_details self.shipping_address = shipping_address + self.key_encryption_key = key_encryption_key class ValidateAddress(ValidationInputRequest): @@ -3419,9 +4008,9 @@ class ValidateAddress(ValidationInputRequest): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". + "ValidateSkuAvailability", "ValidateDataTransferDetails". :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :param shipping_address: Required. Shipping address of the customer. 
:type shipping_address: ~data_box_management_client.models.ShippingAddress diff --git a/src/databox/azext_databox/vendored_sdks/databox/operations/_job_operations.py b/src/databox/azext_databox/vendored_sdks/databox/operations/_job_operations.py index 04b150c7e7e..c167985ba34 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/operations/_job_operations.py +++ b/src/databox/azext_databox/vendored_sdks/databox/operations/_job_operations.py @@ -21,7 +21,7 @@ if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -67,7 +67,7 @@ def list( cls = kwargs.pop('cls', None) # type: ClsType["models.JobResourceList"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2020-04-01" def prepare_request(next_link=None): if not next_link: @@ -108,8 +108,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -140,7 +141,7 @@ def list_by_resource_group( cls = kwargs.pop('cls', None) # type: ClsType["models.JobResourceList"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2020-04-01" def prepare_request(next_link=None): if not next_link: @@ -182,8 +183,9 @@ def 
get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -218,7 +220,7 @@ def get( cls = kwargs.pop('cls', None) # type: ClsType["models.JobResource"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2020-04-01" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -246,7 +248,8 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('JobResource', pipeline_response) @@ -262,7 +265,9 @@ def _create_initial( job_name, # type: str location, # type: str sku, # type: "models.Sku" + transfer_type, # type: Union[str, "models.TransferType"] tags=None, # type: Optional[Dict[str, str]] + type=None, # type: Optional[str] details=None, # type: Optional["models.JobDetails"] delivery_type=None, # type: Optional[Union[str, "models.JobDeliveryType"]] scheduled_date_time=None, # type: Optional[datetime.datetime] @@ -273,8 +278,8 @@ def _create_initial( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _job_resource = models.JobResource(location=location, tags=tags, sku=sku, details=details, delivery_type=delivery_type, scheduled_date_time=scheduled_date_time) - api_version = "2019-09-01" + _job_resource = 
models.JobResource(location=location, tags=tags, sku=sku, type=type, transfer_type=transfer_type, details=details, delivery_type=delivery_type, scheduled_date_time=scheduled_date_time) + api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -306,7 +311,8 @@ def _create_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -324,7 +330,9 @@ def begin_create( job_name, # type: str location, # type: str sku, # type: "models.Sku" + transfer_type, # type: Union[str, "models.TransferType"] tags=None, # type: Optional[Dict[str, str]] + type=None, # type: Optional[str] details=None, # type: Optional["models.JobDetails"] delivery_type=None, # type: Optional[Union[str, "models.JobDeliveryType"]] scheduled_date_time=None, # type: Optional[datetime.datetime] @@ -345,9 +353,13 @@ def begin_create( :type location: str :param sku: The sku type. :type sku: ~data_box_management_client.models.Sku + :param transfer_type: Type of the data transfer. + :type transfer_type: str or ~data_box_management_client.models.TransferType :param tags: The list of key value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). :type tags: dict[str, str] + :param type: Identity type. + :type type: str :param details: Details of a job run. This field will only be sent for expand details filter. :type details: ~data_box_management_client.models.JobDetails :param delivery_type: Delivery type of Job. 
@@ -374,7 +386,9 @@ def begin_create( job_name=job_name, location=location, sku=sku, + transfer_type=transfer_type, tags=tags, + type=type, details=details, delivery_type=delivery_type, scheduled_date_time=scheduled_date_time, @@ -408,7 +422,7 @@ def _delete_initial( cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2020-04-01" # Construct URL url = self._delete_initial.metadata['url'] # type: ignore @@ -431,9 +445,10 @@ def _delete_initial( pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [202, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -495,8 +510,8 @@ def _update_initial( job_name, # type: str if_match=None, # type: Optional[str] tags=None, # type: Optional[Dict[str, str]] + type=None, # type: Optional[str] details=None, # type: Optional["models.UpdateJobDetails"] - destination_account_details=None, # type: Optional[List["models.DestinationAccountDetails"]] **kwargs # type: Any ): # type: (...) 
-> "models.JobResource" @@ -504,8 +519,8 @@ def _update_initial( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _job_resource_update_parameter = models.JobResourceUpdateParameter(tags=tags, details=details, destination_account_details=destination_account_details) - api_version = "2019-09-01" + _job_resource_update_parameter = models.JobResourceUpdateParameter(tags=tags, type=type, details=details) + api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -539,7 +554,8 @@ def _update_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -557,8 +573,8 @@ def begin_update( job_name, # type: str if_match=None, # type: Optional[str] tags=None, # type: Optional[Dict[str, str]] + type=None, # type: Optional[str] details=None, # type: Optional["models.UpdateJobDetails"] - destination_account_details=None, # type: Optional[List["models.DestinationAccountDetails"]] **kwargs # type: Any ): # type: (...) -> LROPoller @@ -575,10 +591,10 @@ def begin_update( :param tags: The list of key value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). :type tags: dict[str, str] + :param type: Identity type. + :type type: str :param details: Details of a job to be updated. :type details: ~data_box_management_client.models.UpdateJobDetails - :param destination_account_details: Destination account details. 
- :type destination_account_details: list[~data_box_management_client.models.DestinationAccountDetails] :keyword callable cls: A custom type or function that will be passed the direct response :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy @@ -599,8 +615,8 @@ def begin_update( job_name=job_name, if_match=if_match, tags=tags, + type=type, details=details, - destination_account_details=destination_account_details, cls=lambda x,y,z: x, **kwargs ) @@ -656,7 +672,7 @@ def book_shipment_pick_up( error_map.update(kwargs.pop('error_map', {})) _shipment_pick_up_request = models.ShipmentPickUpRequest(start_time=start_time, end_time=end_time, shipment_location=shipment_location) - api_version = "2019-09-01" + api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -688,7 +704,8 @@ def book_shipment_pick_up( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ShipmentPickUpResponse', pipeline_response) @@ -725,7 +742,7 @@ def cancel( error_map.update(kwargs.pop('error_map', {})) _cancellation_reason = models.CancellationReason(reason=reason) - api_version = "2019-09-01" + api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -756,7 +773,8 @@ def cancel( if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: 
return cls(pipeline_response, None, {}) @@ -785,7 +803,7 @@ def list_credentials( cls = kwargs.pop('cls', None) # type: ClsType["models.UnencryptedCredentialsList"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2020-04-01" def prepare_request(next_link=None): if not next_link: @@ -826,8 +844,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/src/databox/azext_databox/vendored_sdks/databox/operations/_operation_operations.py b/src/databox/azext_databox/vendored_sdks/databox/operations/_operation_operations.py index f98fa2876bd..a8fd3807363 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/operations/_operation_operations.py +++ b/src/databox/azext_databox/vendored_sdks/databox/operations/_operation_operations.py @@ -60,7 +60,7 @@ def list( cls = kwargs.pop('cls', None) # type: ClsType["models.OperationList"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2020-04-01" def prepare_request(next_link=None): if not next_link: @@ -95,8 +95,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) return pipeline_response diff --git a/src/databox/azext_databox/vendored_sdks/databox/operations/_service_operations.py b/src/databox/azext_databox/vendored_sdks/databox/operations/_service_operations.py index c3ec781bf1d..38c9f58bb27 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/operations/_service_operations.py +++ b/src/databox/azext_databox/vendored_sdks/databox/operations/_service_operations.py @@ -45,97 +45,11 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config - def list_available_sku( - self, - location, # type: str - country, # type: str - available_sku_request_location, # type: str - sku_names=None, # type: Optional[List[Union[str, "models.SkuName"]]] - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.AvailableSkusResult"] - """This method provides the list of available skus for the given subscription and location. - - :param location: The location of the resource. - :type location: str - :param country: ISO country code. Country for hardware shipment. For codes check: - https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. - :type country: str - :param available_sku_request_location: Location for data transfer. For locations check: - https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. - :type available_sku_request_location: str - :param sku_names: Sku Names to filter for available skus. 
- :type sku_names: list[str or ~data_box_management_client.models.SkuName] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either AvailableSkusResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_box_management_client.models.AvailableSkusResult] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.AvailableSkusResult"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - _available_sku_request = models.AvailableSkuRequest(country=country, location=available_sku_request_location, sku_names=sku_names) - api_version = "2019-09-01" - content_type = "application/json" - transfer_type = "ImportToAzure" - - def prepare_request(next_link=None): - if not next_link: - # Construct URL - url = self.list_available_sku.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'location': self._serialize.url("location", location, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - # Construct and send request - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(_available_sku_request, 'AvailableSkuRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, 
header_parameters, **body_content_kwargs) - - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('AvailableSkusResult', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_available_sku.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataBox/locations/{location}/availableSkus'} # type: ignore - def list_available_sku_by_resource_group( self, resource_group_name, # type: str location, # type: str + transfer_type, # type: Union[str, "models.TransferType"] country, # type: str available_sku_request_location, # type: str sku_names=None, # type: Optional[List[Union[str, "models.SkuName"]]] @@ -148,6 +62,8 @@ def list_available_sku_by_resource_group( :type resource_group_name: str :param location: The location of the resource. :type location: str + :param transfer_type: Type of the transfer. + :type transfer_type: str or ~data_box_management_client.models.TransferType :param country: ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. 
:type country: str @@ -164,10 +80,9 @@ def list_available_sku_by_resource_group( cls = kwargs.pop('cls', None) # type: ClsType["models.AvailableSkusResult"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - _available_sku_request = models.AvailableSkuRequest(country=country, location=available_sku_request_location, sku_names=sku_names) - api_version = "2019-09-01" + _available_sku_request = models.AvailableSkuRequest(transfer_type=transfer_type, country=country, location=available_sku_request_location, sku_names=sku_names) + api_version = "2020-04-01" content_type = "application/json" - transfer_type = "ImportToAzure" def prepare_request(next_link=None): if not next_link: @@ -213,8 +128,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -233,7 +149,7 @@ def validate_address( **kwargs # type: Any ): # type: (...) -> "models.AddressValidationOutput" - """[DEPRECATED NOTICE: This operation will soon be removed] This method validates the customer shipping address and provide alternate addresses if any. + """[DEPRECATED NOTICE: This operation will soon be removed]. This method validates the customer shipping address and provide alternate addresses if any. :param location: The location of the resource. 
:type location: str @@ -255,7 +171,7 @@ def validate_address( error_map.update(kwargs.pop('error_map', {})) _validate_address = models.ValidateAddress(validation_type=validation_type, shipping_address=shipping_address, device_type=device_type, preferred_shipment_type=preferred_shipment_type) - api_version = "2019-09-01" + api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -286,7 +202,8 @@ def validate_address( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('AddressValidationOutput', pipeline_response) @@ -320,7 +237,7 @@ def validate_input_by_resource_group( cls = kwargs.pop('cls', None) # type: ClsType["models.ValidationResponse"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -352,7 +269,8 @@ def validate_input_by_resource_group( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ValidationResponse', pipeline_response) @@ -383,7 +301,7 @@ def validate_input( cls = kwargs.pop('cls', None) # type: ClsType["models.ValidationResponse"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + 
api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -414,7 +332,8 @@ def validate_input( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ValidationResponse', pipeline_response) @@ -432,7 +351,7 @@ def region_configuration( **kwargs # type: Any ): # type: (...) -> "models.RegionConfigurationResponse" - """This API provides configuration details specific to given region/location. + """This API provides configuration details specific to given region/location at Subscription level. :param location: The location of the resource. :type location: str @@ -451,7 +370,7 @@ def region_configuration( error_map.update(kwargs.pop('error_map', {})) _region_configuration_request = models.RegionConfigurationRequest(schedule_availability_request=schedule_availability_request, sku_name=sku_name) - api_version = "2019-09-01" + api_version = "2020-04-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL @@ -482,7 +401,8 @@ def region_configuration( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('RegionConfigurationResponse', pipeline_response) @@ -491,3 +411,76 @@ def region_configuration( return deserialized region_configuration.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataBox/locations/{location}/regionConfiguration'} # type: ignore + 
+ def region_configuration_by_resource_group( + self, + resource_group_name, # type: str + location, # type: str + schedule_availability_request=None, # type: Optional["models.ScheduleAvailabilityRequest"] + sku_name=None, # type: Optional[Union[str, "models.SkuName"]] + **kwargs # type: Any + ): + # type: (...) -> "models.RegionConfigurationResponse" + """This API provides configuration details specific to given region/location at Resource group level. + + :param resource_group_name: The Resource Group Name. + :type resource_group_name: str + :param location: The location of the resource. + :type location: str + :param schedule_availability_request: Request body to get the availability for scheduling + orders. + :type schedule_availability_request: ~data_box_management_client.models.ScheduleAvailabilityRequest + :param sku_name: Type of the device. + :type sku_name: str or ~data_box_management_client.models.SkuName + :keyword callable cls: A custom type or function that will be passed the direct response + :return: RegionConfigurationResponse, or the result of cls(response) + :rtype: ~data_box_management_client.models.RegionConfigurationResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.RegionConfigurationResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _region_configuration_request = models.RegionConfigurationRequest(schedule_availability_request=schedule_availability_request, sku_name=sku_name) + api_version = "2020-04-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.region_configuration_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 
'str'), + 'location': self._serialize.url("location", location, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_region_configuration_request, 'RegionConfigurationRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('RegionConfigurationResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + region_configuration_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBox/locations/{location}/regionConfiguration'} # type: ignore diff --git a/src/databox/report.md b/src/databox/report.md index 3ee425917f5..6a96b8ac53b 100644 --- a/src/databox/report.md +++ b/src/databox/report.md @@ -30,7 +30,9 @@ create a databox job. |**--job-name**|string|The name of the job Resource within the specified resource group. 
job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name| |**--location**|string|The location of the resource. This will be one of the supported and registered Azure Regions (e.g. West US, East US, Southeast Asia, etc.). The region of a resource cannot be changed once it is created, but if an identical region is specified on update the request will succeed.|location| |**--sku**|object|The sku type.|sku| +|**--transfer-type**|sealed-choice|Type of the data transfer.|transfer_type| |**--tags**|dictionary|The list of key value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups).|tags| +|**--identity-type**|string|Identity type|type| |**--details**|object|Details of a job run. This field will only be sent for expand details filter.|details| |**--delivery-type**|sealed-choice|Delivery type of Job.|delivery_type| |**--delivery-info-scheduled-date-time**|date-time|Scheduled date time.|scheduled_date_time| @@ -77,17 +79,8 @@ update a databox job. |**--job-name**|string|The name of the job Resource within the specified resource group. job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name| |**--if-match**|string|Defines the If-Match condition. The patch will be performed only if the ETag of the job on the server matches this value.|if_match| |**--tags**|dictionary|The list of key value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups).|tags| +|**--identity-type**|string|Identity type|type| |**--details**|object|Details of a job to be updated.|details| -|**--destination-account-details**|array|Destination account details.|destination_account_details| -### databox service list-available-sku - -list-available-sku a databox service. 
- -|Option|Type|Description|Path (SDK)|Path (swagger)| -|------|----|-----------|----------|--------------| -|**--location**|string|The location of the resource|location| -|**--country**|string|ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements|country| -|**--sku-names**|array|Sku Names to filter for available skus|sku_names| ### databox service list-available-sku-by-resource-group list-available-sku-by-resource-group a databox service. @@ -96,6 +89,7 @@ list-available-sku-by-resource-group a databox service. |------|----|-----------|----------|--------------| |**--resource-group-name**|string|The Resource Group Name|resource_group_name| |**--location**|string|The location of the resource|location| +|**--transfer-type**|sealed-choice|Type of the transfer.|transfer_type| |**--country**|string|ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements|country| |**--sku-names**|array|Sku Names to filter for available skus|sku_names| ### databox service region-configuration @@ -109,6 +103,18 @@ region-configuration a databox service. |**--disk-schedule-availability-request**|object|Request body to get the availability for scheduling disk orders.|disk_schedule_availability_request| |**--heavy-schedule-availability-request**|object|Request body to get the availability for scheduling heavy orders.|heavy_schedule_availability_request| |**--transport-availability-request-sku-name**|sealed-choice|Type of the device.|sku_name| +### databox service region-configuration-by-resource-group + +region-configuration-by-resource-group a databox service. 
+
+|Option|Type|Description|Path (SDK)|Path (swagger)|
+|------|----|-----------|----------|--------------|
+|**--resource-group-name**|string|The Resource Group Name|resource_group_name|
+|**--location**|string|The location of the resource|location|
+|**--data-box-schedule-availability-request**|object|Request body to get the availability for scheduling data box orders.|data_box_schedule_availability_request|
+|**--disk-schedule-availability-request**|object|Request body to get the availability for scheduling disk orders.|disk_schedule_availability_request|
+|**--heavy-schedule-availability-request**|object|Request body to get the availability for scheduling heavy orders.|heavy_schedule_availability_request|
+|**--transport-availability-request-sku-name**|sealed-choice|Type of the device.|sku_name|
 ### databox service validate-address
 
 validate-address a databox service.